mirror of https://github.com/overcuriousity/autopsy-flatpak.git
synced 2025-07-20 11:26:53 +00:00

remove artifact nodes no longer used

parent 809bb5e804
commit d6d76d6590
@@ -18,65 +18,15 @@
|
||||
*/
|
||||
package org.sleuthkit.autopsy.datamodel;
|
||||
|
||||
import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactSearchParam;
|
||||
import java.beans.PropertyChangeEvent;
|
||||
import java.beans.PropertyChangeListener;
|
||||
import java.util.Collections;
|
||||
import java.util.EnumSet;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.logging.Level;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
import org.openide.nodes.ChildFactory;
|
||||
import org.openide.nodes.Children;
|
||||
import org.openide.nodes.Node;
|
||||
import org.openide.nodes.Sheet;
|
||||
import org.openide.util.Lookup;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.openide.util.WeakListeners;
|
||||
import org.openide.util.lookup.Lookups;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.datamodel.accounts.Accounts;
|
||||
import org.sleuthkit.autopsy.datamodel.utils.IconsUtil;
|
||||
import org.sleuthkit.autopsy.ingest.IngestManager;
|
||||
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.SleuthkitCase;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.autopsy.guiutils.RefreshThrottler;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact.Category;
|
||||
import org.python.google.common.collect.Sets;
|
||||
import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent;
|
||||
import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSearchParam;
|
||||
import org.sleuthkit.autopsy.corecomponents.SelectionResponder;
|
||||
import org.sleuthkit.datamodel.Blackboard;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.Type.TSK_ACCOUNT;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.Type.TSK_DATA_SOURCE_USAGE;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.Type.TSK_EMAIL_MSG;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.Type.TSK_HASHSET_HIT;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.Type.TSK_INTERESTING_ARTIFACT_HIT;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.Type.TSK_GEN_INFO;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_DOWNLOAD_SOURCE;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.Type.TSK_INTERESTING_ITEM;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.Type.TSK_TL_EVENT;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.Type.TSK_ASSOCIATED_OBJECT;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.Type.TSK_KEYWORD_HIT;
|
||||
|
||||
/**
|
||||
* Classes for creating nodes for BlackboardArtifacts.
|
||||
*/
|
||||
public class Artifacts {
|
||||
|
||||
private static final Set<IngestManager.IngestJobEvent> INGEST_JOB_EVENTS_OF_INTEREST
|
||||
= EnumSet.of(IngestManager.IngestJobEvent.COMPLETED, IngestManager.IngestJobEvent.CANCELLED);
|
||||
|
||||
/**
|
||||
* Base class for a parent node of artifacts.
|
||||
*/
|
||||
@@ -128,626 +78,4 @@ public class Artifacts {
|
||||
return getClass().getName();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A key to be used with the type factory.
|
||||
*/
|
||||
private static class TypeNodeKey {
|
||||
|
||||
private final UpdatableCountTypeNode node;
|
||||
private final Set<BlackboardArtifact.Type> applicableTypes;
|
||||
|
||||
/**
|
||||
* Constructor generating a generic TypeNode for a given artifact type.
|
||||
*
|
||||
* @param type The type for the key.
|
||||
* @param dsObjId The data source object id if filtering should occur.
|
||||
* If no filtering should occur, this number should be
|
||||
* less than or equal to 0.
|
||||
*/
|
||||
TypeNodeKey(BlackboardArtifact.Type type, long dsObjId) {
|
||||
this(new TypeNode(type, dsObjId), type);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor for any UpdatableCountTypeNode.
|
||||
*
|
||||
* @param typeNode The UpdatableCountTypeNode.
|
||||
* @param types The blackboard artifact types corresponding to this
|
||||
* node.
|
||||
*/
|
||||
TypeNodeKey(UpdatableCountTypeNode typeNode, BlackboardArtifact.Type... types) {
|
||||
this.node = typeNode;
|
||||
this.applicableTypes = Stream.of(types)
|
||||
.filter(t -> t != null)
|
||||
.collect(Collectors.toSet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the node associated with this key.
|
||||
*
|
||||
* @return The node associated with this key.
|
||||
*/
|
||||
UpdatableCountTypeNode getNode() {
|
||||
return node;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the blackboard artifact types associated with this key.
|
||||
*
|
||||
* @return The blackboard artifact types associated with this key.
|
||||
*/
|
||||
Set<BlackboardArtifact.Type> getApplicableTypes() {
|
||||
return applicableTypes;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int hash = 3;
|
||||
hash = 61 * hash + Objects.hashCode(this.applicableTypes);
|
||||
return hash;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
if (obj == null) {
|
||||
return false;
|
||||
}
|
||||
if (getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
final TypeNodeKey other = (TypeNodeKey) obj;
|
||||
if (!Objects.equals(this.applicableTypes, other.applicableTypes)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
}
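/*
 * A minimal sketch (hypothetical values): equality is based only on
 * applicableTypes, so two keys built over the same artifact type compare equal
 * and are collapsed by the .distinct() call in TypeFactory.createKeys() below.
 *
 *   TypeNodeKey a = new TypeNodeKey(BlackboardArtifact.Type.TSK_WEB_BOOKMARK, 0);
 *   TypeNodeKey b = new TypeNodeKey(BlackboardArtifact.Type.TSK_WEB_BOOKMARK, 0);
 *   assert a.equals(b) && a.hashCode() == b.hashCode();
 */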
|
||||
|
||||
/**
|
||||
* Factory for showing a list of artifact types (e.g., all the data artifact
|
||||
* types).
|
||||
*/
|
||||
static class TypeFactory extends ChildFactory.Detachable<TypeNodeKey> implements RefreshThrottler.Refresher {
|
||||
|
||||
private static final Logger logger = Logger.getLogger(TypeFactory.class.getName());
|
||||
|
||||
/**
|
||||
* Types that should not be shown in the tree.
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
private static final Set<BlackboardArtifact.Type> IGNORED_TYPES = Sets.newHashSet(
|
||||
// these are shown in other parts of the UI (and different node types)
|
||||
TSK_DATA_SOURCE_USAGE,
|
||||
TSK_GEN_INFO,
|
||||
new BlackboardArtifact.Type(TSK_DOWNLOAD_SOURCE),
|
||||
TSK_TL_EVENT,
|
||||
//This is not meant to be shown in the UI at all. It is more of a meta artifact.
|
||||
TSK_ASSOCIATED_OBJECT
|
||||
);
|
||||
|
||||
/**
|
||||
* Returns a Children key to be used for a particular artifact type.
|
||||
*
|
||||
* @param type The artifact type.
|
||||
* @param skCase The relevant Sleuthkit case in order to create the
|
||||
* node.
|
||||
* @param dsObjId The data source object id to use for filtering. If id
|
||||
* is less than or equal to 0, no filtering will occur.
|
||||
*
|
||||
* @return The generated key.
|
||||
*
|
||||
* @SuppressWarnings("deprecation") - we need to support already
|
||||
* existing interesting file and artifact hits.
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
private static TypeNodeKey getTypeKey(BlackboardArtifact.Type type, SleuthkitCase skCase, long dsObjId) {
|
||||
int typeId = type.getTypeID();
|
||||
if (TSK_EMAIL_MSG.getTypeID() == typeId) {
|
||||
EmailExtracted.RootNode emailNode = new EmailExtracted(skCase, dsObjId).new RootNode();
|
||||
return new TypeNodeKey(emailNode, TSK_EMAIL_MSG);
|
||||
|
||||
} else if (TSK_KEYWORD_HIT.getTypeID() == typeId) {
|
||||
KeywordHits.RootNode keywordsNode = new KeywordHits(skCase, dsObjId).new RootNode();
|
||||
return new TypeNodeKey(keywordsNode, TSK_KEYWORD_HIT);
|
||||
|
||||
} else if (TSK_INTERESTING_ITEM.getTypeID() == typeId) {
|
||||
InterestingHits.RootNode interestingHitsNode = new InterestingHits(skCase, TSK_INTERESTING_ITEM, dsObjId).new RootNode();
|
||||
return new TypeNodeKey(interestingHitsNode, TSK_INTERESTING_ITEM);
|
||||
} else if (TSK_INTERESTING_ARTIFACT_HIT.getTypeID() == typeId) {
|
||||
InterestingHits.RootNode interestingHitsNode = new InterestingHits(skCase, TSK_INTERESTING_ARTIFACT_HIT, dsObjId).new RootNode();
|
||||
return new TypeNodeKey(interestingHitsNode, TSK_INTERESTING_ARTIFACT_HIT);
|
||||
} else if (TSK_INTERESTING_FILE_HIT.getTypeID() == typeId) {
|
||||
InterestingHits.RootNode interestingHitsNode = new InterestingHits(skCase, TSK_INTERESTING_FILE_HIT, dsObjId).new RootNode();
|
||||
return new TypeNodeKey(interestingHitsNode, TSK_INTERESTING_FILE_HIT);
|
||||
} else if (TSK_HASHSET_HIT.getTypeID() == typeId) {
|
||||
HashsetHits.RootNode hashsetHits = new HashsetHits(skCase, dsObjId).new RootNode();
|
||||
return new TypeNodeKey(hashsetHits, TSK_HASHSET_HIT);
|
||||
|
||||
} else {
|
||||
return new TypeNodeKey(type, dsObjId);
|
||||
}
|
||||
}
|
||||
|
||||
// maps the artifact type to its child node
|
||||
private final Map<BlackboardArtifact.Type, TypeNodeKey> typeNodeMap = new HashMap<>();
|
||||
private final long filteringDSObjId;
|
||||
|
||||
/**
|
||||
* RefreshThrottler is used to limit the number of refreshes performed
|
||||
* when CONTENT_CHANGED and DATA_ADDED ingest module events are
|
||||
* received.
|
||||
*/
|
||||
private final RefreshThrottler refreshThrottler = new RefreshThrottler(this);
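/*
 * Rough sketch of the throttling contract as relied on here: the throttler
 * registers for ingest module events, asks this Refresher via
 * isRefreshRequired(evt) whether an event matters, and if so calls refresh()
 * at a rate-limited interval rather than once per event.
 */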
|
||||
private final Category category;
|
||||
|
||||
private final PropertyChangeListener weakPcl;
|
||||
|
||||
/**
|
||||
* Main constructor.
|
||||
*
|
||||
* @param category The category of types to be displayed.
|
||||
* @param filteringDSObjId The data source object id to use for
|
||||
* filtering. If id is less than or equal to 0,
|
||||
* no filtering will occur.
|
||||
*/
|
||||
TypeFactory(Category category, long filteringDSObjId) {
|
||||
super();
|
||||
this.filteringDSObjId = filteringDSObjId;
|
||||
this.category = category;
|
||||
|
||||
PropertyChangeListener pcl = (PropertyChangeEvent evt) -> {
|
||||
String eventType = evt.getPropertyName();
|
||||
if (eventType.equals(Case.Events.CURRENT_CASE.toString())) {
|
||||
// case was closed. Remove listeners so that we don't get called with a stale case handle
|
||||
if (evt.getNewValue() == null) {
|
||||
removeNotify();
|
||||
}
|
||||
} else if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString())
|
||||
|| eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) {
|
||||
/**
|
||||
* This is a stop gap measure until a different way of
|
||||
* handling the closing of cases is worked out. Currently,
|
||||
* remote events may be received for a case that is already
|
||||
* closed.
|
||||
*/
|
||||
try {
|
||||
Case.getCurrentCaseThrows();
|
||||
refresh(false);
|
||||
} catch (NoCurrentCaseException notUsed) {
|
||||
/**
|
||||
* Case is closed, do nothing.
|
||||
*/
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
weakPcl = WeakListeners.propertyChange(pcl, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void addNotify() {
|
||||
super.addNotify();
|
||||
refreshThrottler.registerForIngestModuleEvents();
|
||||
IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, weakPcl);
|
||||
Case.addEventTypeSubscriber(EnumSet.of(Case.Events.CURRENT_CASE), weakPcl);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void finalize() throws Throwable {
|
||||
super.finalize();
|
||||
refreshThrottler.unregisterEventListener();
|
||||
IngestManager.getInstance().removeIngestJobEventListener(weakPcl);
|
||||
Case.removeEventTypeSubscriber(EnumSet.of(Case.Events.CURRENT_CASE), weakPcl);
|
||||
typeNodeMap.clear();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean createKeys(List<TypeNodeKey> list) {
|
||||
try {
|
||||
// Get all types in use
|
||||
SleuthkitCase skCase = Case.getCurrentCaseThrows().getSleuthkitCase();
|
||||
List<BlackboardArtifact.Type> types = (this.filteringDSObjId > 0)
|
||||
? skCase.getBlackboard().getArtifactTypesInUse(this.filteringDSObjId)
|
||||
: skCase.getArtifactTypesInUse();
|
||||
|
||||
List<TypeNodeKey> allKeysSorted = types.stream()
|
||||
// filter types by category and ensure they are not in the list of ignored types
|
||||
.filter(tp -> category.equals(tp.getCategory()) && !IGNORED_TYPES.contains(tp))
|
||||
.map(tp -> {
|
||||
// if typeNodeMap already contains key, update the relevant node and return the node
|
||||
if (typeNodeMap.containsKey(tp)) {
|
||||
TypeNodeKey typeKey = typeNodeMap.get(tp);
|
||||
typeKey.getNode().updateDisplayName();
|
||||
return typeKey;
|
||||
} else {
|
||||
// if key is not in map, create the type key and add to map
|
||||
TypeNodeKey newTypeKey = getTypeKey(tp, skCase, filteringDSObjId);
|
||||
for (BlackboardArtifact.Type recordType : newTypeKey.getApplicableTypes()) {
|
||||
typeNodeMap.put(recordType, newTypeKey);
|
||||
}
|
||||
return newTypeKey;
|
||||
}
|
||||
})
|
||||
// ensure record is returned
|
||||
.filter(record -> record != null)
|
||||
// there are potentially multiple types that apply to the same node (i.e. Interesting Files / Artifacts)
|
||||
// ensure the keys are distinct
|
||||
.distinct()
|
||||
// sort by display name
|
||||
.sorted((a, b) -> {
|
||||
String aSafe = (a.getNode() == null || a.getNode().getDisplayName() == null) ? "" : a.getNode().getDisplayName();
|
||||
String bSafe = (b.getNode() == null || b.getNode().getDisplayName() == null) ? "" : b.getNode().getDisplayName();
|
||||
return aSafe.compareToIgnoreCase(bSafe);
|
||||
})
|
||||
.collect(Collectors.toList());
|
||||
|
||||
list.addAll(allKeysSorted);
|
||||
|
||||
} catch (NoCurrentCaseException ex) {
|
||||
logger.log(Level.WARNING, "Trying to access case when no case is open.", ex); //NON-NLS
|
||||
} catch (TskCoreException ex) {
|
||||
logger.log(Level.SEVERE, "Error getting list of artifacts in use: " + ex.getLocalizedMessage()); //NON-NLS
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Node createNodeForKey(TypeNodeKey key) {
|
||||
return key.getNode();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void refresh() {
|
||||
refresh(false);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isRefreshRequired(PropertyChangeEvent evt) {
|
||||
String eventType = evt.getPropertyName();
|
||||
if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) {
|
||||
/**
|
||||
* This is a stop gap measure until a different way of handling
|
||||
* the closing of cases is worked out. Currently, remote events
|
||||
* may be received for a case that is already closed.
|
||||
*/
|
||||
try {
|
||||
Case.getCurrentCaseThrows();
|
||||
/**
|
||||
* Due to some unresolved issues with how cases are closed,
|
||||
* it is possible for the event to have a null oldValue if
|
||||
* the event is a remote event.
|
||||
*/
|
||||
final ModuleDataEvent event = (ModuleDataEvent) evt.getOldValue();
|
||||
if (null != event && category.equals(event.getBlackboardArtifactType().getCategory())
|
||||
&& !(IGNORED_TYPES.contains(event.getBlackboardArtifactType()))) {
|
||||
return true;
|
||||
}
|
||||
} catch (NoCurrentCaseException notUsed) {
|
||||
/**
|
||||
* Case is closed, do nothing.
|
||||
*/
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
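/*
 * Typical attachment point, sketched under the assumption of the usual NetBeans
 * Children API usage (the actual call site is outside this hunk):
 *
 *   Children.create(new TypeFactory(Category.DATA_ARTIFACT, filteringDSObjId), true);
 *
 * The TypeNodeKey objects produced by createKeys() then become child nodes via
 * createNodeForKey().
 */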
|
||||
|
||||
/**
|
||||
* Abstract class for type(s) nodes. This class allows for displaying a
* count of the artifacts with the type(s) associated with this node.
|
||||
*/
|
||||
public static abstract class UpdatableCountTypeNode extends DisplayableItemNode {
|
||||
|
||||
private static final Logger logger = Logger.getLogger(UpdatableCountTypeNode.class.getName());
|
||||
|
||||
private final Set<BlackboardArtifact.Type> types;
|
||||
private final long filteringDSObjId;
|
||||
private long childCount = 0;
|
||||
private final String baseName;
|
||||
|
||||
/**
|
||||
* Main constructor.
|
||||
*
|
||||
* @param children The Children to associate with this node.
* @param lookup The Lookup to use with this node.
|
||||
* @param baseName The display name. The Node.displayName will
|
||||
* be of format "[baseName] ([count])".
|
||||
* @param filteringDSObjId The data source object id to use for
|
||||
* filtering. If id is less than or equal to 0,
|
||||
* no filtering will occur.
|
||||
* @param types The types associated with this type node.
|
||||
*/
|
||||
public UpdatableCountTypeNode(Children children, Lookup lookup, String baseName,
|
||||
long filteringDSObjId, BlackboardArtifact.Type... types) {
|
||||
|
||||
super(children, lookup);
|
||||
this.types = Stream.of(types).collect(Collectors.toSet());
|
||||
this.filteringDSObjId = filteringDSObjId;
|
||||
this.baseName = baseName;
|
||||
updateDisplayName();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the count of artifacts associated with these type(s).
|
||||
*
|
||||
* @return The count of artifacts associated with these type(s).
|
||||
*/
|
||||
protected long getChildCount() {
|
||||
return this.childCount;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetches the count to be displayed from the case.
|
||||
*
|
||||
* @param skCase The relevant SleuthkitCase.
|
||||
*
|
||||
* @return The count to be displayed.
|
||||
*
|
||||
* @throws TskCoreException
|
||||
*/
|
||||
protected long fetchChildCount(SleuthkitCase skCase) throws TskCoreException {
|
||||
int count = 0;
|
||||
for (BlackboardArtifact.Type type : this.types) {
|
||||
if (filteringDSObjId > 0) {
|
||||
count += skCase.getBlackboard().getArtifactsCount(type.getTypeID(), filteringDSObjId);
|
||||
} else {
|
||||
count += skCase.getBlackboardArtifactsTypeCount(type.getTypeID());
|
||||
}
|
||||
}
|
||||
return count;
|
||||
}
|
||||
|
||||
long getFilteringDataSourceId() {
|
||||
return filteringDSObjId;
|
||||
}
|
||||
|
||||
/**
|
||||
* When this method is called, the count to be displayed will be
|
||||
* updated.
|
||||
*/
|
||||
void updateDisplayName() {
|
||||
try {
|
||||
SleuthkitCase skCase = Case.getCurrentCaseThrows().getSleuthkitCase();
|
||||
this.childCount = fetchChildCount(skCase);
|
||||
} catch (NoCurrentCaseException ex) {
|
||||
logger.log(Level.WARNING, "Error fetching data when case closed.", ex);
|
||||
} catch (TskCoreException ex) {
|
||||
logger.log(Level.WARNING, "Error getting child count", ex); //NON-NLS
|
||||
}
|
||||
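// \u200E is the Unicode LEFT-TO-RIGHT MARK, most likely present so the
// parenthesized count renders correctly next to right-to-left display names.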
super.setDisplayName(this.baseName + " \u200E(\u200E" + this.childCount + ")\u200E");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Default node encapsulating a blackboard artifact type. This is used on
|
||||
* the left-hand navigation side of the Autopsy UI as the parent node for
|
||||
* all of the artifacts of a given type. Its children will be
|
||||
* BlackboardArtifactNode objects.
|
||||
*/
|
||||
static class TypeNode extends UpdatableCountTypeNode implements SelectionResponder {
|
||||
|
||||
private final BlackboardArtifact.Type type;
|
||||
|
||||
/**
|
||||
* Main constructor.
|
||||
*
|
||||
* @param type The blackboard artifact type for this node.
|
||||
* @param filteringDSObjId The data source object id to use for
|
||||
* filtering. If id is less than or equal to 0,
|
||||
* no filtering will occur.
|
||||
*/
|
||||
TypeNode(BlackboardArtifact.Type type, long filteringDSObjId) {
|
||||
super(new Children.Array(),
|
||||
Lookups.fixed(type.getDisplayName()),
|
||||
type.getDisplayName(),
|
||||
filteringDSObjId,
|
||||
type);
|
||||
|
||||
super.setName(type.getTypeName());
|
||||
this.type = type;
|
||||
String iconPath = IconsUtil.getIconFilePath(type.getTypeID());
|
||||
setIconBaseWithExtension(iconPath != null && iconPath.charAt(0) == '/' ? iconPath.substring(1) : iconPath);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void respondSelection(DataResultTopComponent dataResultPanel) {
|
||||
switch (type.getCategory()) {
case DATA_ARTIFACT:
dataResultPanel.displayDataArtifact(new DataArtifactSearchParam(type, getFilteringDataSourceId() > 0 ? getFilteringDataSourceId() : null));
|
||||
break;
|
||||
default:
|
||||
dataResultPanel.displayAnalysisResult(new AnalysisResultSearchParam(type, getFilteringDataSourceId() > 0 ? getFilteringDataSourceId() : null));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Sheet createSheet() {
|
||||
Sheet sheet = super.createSheet();
|
||||
Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
|
||||
if (sheetSet == null) {
|
||||
sheetSet = Sheet.createPropertiesSet();
|
||||
sheet.put(sheetSet);
|
||||
}
|
||||
|
||||
sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "ArtifactTypeNode.createSheet.artType.name"),
|
||||
NbBundle.getMessage(this.getClass(), "ArtifactTypeNode.createSheet.artType.displayName"),
|
||||
NbBundle.getMessage(this.getClass(), "ArtifactTypeNode.createSheet.artType.desc"),
|
||||
type.getDisplayName()));
|
||||
|
||||
sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "ArtifactTypeNode.createSheet.childCnt.name"),
|
||||
NbBundle.getMessage(this.getClass(), "ArtifactTypeNode.createSheet.childCnt.displayName"),
|
||||
NbBundle.getMessage(this.getClass(), "ArtifactTypeNode.createSheet.childCnt.desc"),
|
||||
getChildCount()));
|
||||
|
||||
return sheet;
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
|
||||
return visitor.visit(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isLeafTypeNode() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getItemType() {
|
||||
return getClass().getName() + type.getDisplayName();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates children for a given artifact type
|
||||
*/
|
||||
private static class ArtifactFactory extends BaseChildFactory<BlackboardArtifact> implements RefreshThrottler.Refresher {
|
||||
|
||||
private static final Logger logger = Logger.getLogger(ArtifactFactory.class.getName());
|
||||
private final BlackboardArtifact.Type type;
|
||||
|
||||
/**
|
||||
* RefreshThrottler is used to limit the number of refreshes performed
|
||||
* when CONTENT_CHANGED and DATA_ADDED ingest module events are
|
||||
* received.
|
||||
*/
|
||||
private final RefreshThrottler refreshThrottler = new RefreshThrottler(this);
|
||||
private final long filteringDSObjId;
|
||||
|
||||
/**
|
||||
* Main constructor.
|
||||
*
|
||||
* @param type The blackboard artifact type for this node.
|
||||
* @param filteringDSObjId The data source object id to use for
|
||||
* filtering. If id is less than or equal to 0,
|
||||
* no filtering will occur.
|
||||
*/
|
||||
ArtifactFactory(BlackboardArtifact.Type type, long filteringDSObjId) {
|
||||
super(type.getTypeName());
|
||||
this.type = type;
|
||||
this.filteringDSObjId = filteringDSObjId;
|
||||
}
|
||||
|
||||
private final PropertyChangeListener pcl = (PropertyChangeEvent evt) -> {
|
||||
String eventType = evt.getPropertyName();
|
||||
if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString())
|
||||
|| eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) {
|
||||
/**
|
||||
* Checking for a current case is a stop gap measure until a
|
||||
* different way of handling the closing of cases is worked out.
|
||||
* Currently, remote events may be received for a case that is
|
||||
* already closed.
|
||||
*/
|
||||
try {
|
||||
Case.getCurrentCaseThrows();
|
||||
refresh(false);
|
||||
} catch (NoCurrentCaseException notUsed) {
|
||||
/**
|
||||
* Case is closed, do nothing.
|
||||
*/
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
private final PropertyChangeListener weakPcl = WeakListeners.propertyChange(pcl, null);
|
||||
|
||||
@Override
|
||||
protected void onAdd() {
|
||||
refreshThrottler.registerForIngestModuleEvents();
|
||||
IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, weakPcl);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void onRemove() {
|
||||
if (refreshThrottler != null) {
|
||||
refreshThrottler.unregisterEventListener();
|
||||
}
|
||||
IngestManager.getInstance().removeIngestJobEventListener(weakPcl);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Node createNodeForKey(BlackboardArtifact key) {
|
||||
return new BlackboardArtifactNode(key);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected List<BlackboardArtifact> makeKeys() {
|
||||
try {
|
||||
List<? extends BlackboardArtifact> arts;
|
||||
Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
|
||||
switch (this.type.getCategory()) {
|
||||
|
||||
case ANALYSIS_RESULT:
|
||||
arts = (filteringDSObjId > 0)
|
||||
? blackboard.getAnalysisResultsByType(type.getTypeID(), filteringDSObjId)
|
||||
: blackboard.getAnalysisResultsByType(type.getTypeID());
|
||||
break;
|
||||
case DATA_ARTIFACT:
|
||||
default:
|
||||
arts = (filteringDSObjId > 0)
|
||||
? blackboard.getDataArtifacts(type.getTypeID(), filteringDSObjId)
|
||||
: blackboard.getDataArtifacts(type.getTypeID());
|
||||
break;
|
||||
}
|
||||
|
||||
for (BlackboardArtifact art : arts) {
|
||||
//Cache attributes while we are off the EDT.
|
||||
//See JIRA-5969
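//(EDT = the Swing Event Dispatch Thread; getAttributes() loads and caches the
//attributes now so the UI thread does not have to query the database later.)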
|
||||
art.getAttributes();
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
List<BlackboardArtifact> toRet = (List<BlackboardArtifact>) (List<?>) arts;
|
||||
return toRet;
|
||||
} catch (NoCurrentCaseException ex) {
|
||||
logger.log(Level.WARNING, "Trying to access case when no case is open.", ex); //NON-NLS
|
||||
} catch (TskCoreException ex) {
|
||||
logger.log(Level.SEVERE, "Couldn't get blackboard artifacts from database", ex); //NON-NLS
|
||||
}
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void refresh() {
|
||||
refresh(false);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isRefreshRequired(PropertyChangeEvent evt) {
|
||||
String eventType = evt.getPropertyName();
|
||||
if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) {
|
||||
|
||||
/**
|
||||
* Checking for a current case is a stop gap measure until a
|
||||
* different way of handling the closing of cases is worked out.
|
||||
* Currently, remote events may be received for a case that is
|
||||
* already closed.
|
||||
*/
|
||||
try {
|
||||
Case.getCurrentCaseThrows();
|
||||
/**
|
||||
* Even with the check above, it is still possible that the
|
||||
* case will be closed in a different thread before this
|
||||
* code executes. If that happens, it is possible for the
|
||||
* event to have a null oldValue.
|
||||
*/
|
||||
final ModuleDataEvent event = (ModuleDataEvent) evt.getOldValue();
|
||||
if (null != event && event.getBlackboardArtifactType().equals(type)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
} catch (NoCurrentCaseException notUsed) {
|
||||
/**
|
||||
* Case is closed, do nothing.
|
||||
*/
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -33,18 +33,6 @@ public interface AutopsyItemVisitor<T> {
|
||||
|
||||
T visit(Views v);
|
||||
|
||||
T visit(RecentFiles rf);
|
||||
|
||||
T visit(RecentFiles.RecentFilesFilter rff);
|
||||
|
||||
T visit(KeywordHits kh);
|
||||
|
||||
T visit(HashsetHits hh);
|
||||
|
||||
T visit(EmailExtracted ee);
|
||||
|
||||
T visit(InterestingHits ih);
|
||||
|
||||
T visit(Tags tagsNodeKey);
|
||||
|
||||
T visit(Reports reportsItem);
|
||||
@@ -68,36 +56,6 @@ public interface AutopsyItemVisitor<T> {
|
||||
|
||||
protected abstract T defaultVisit(AutopsyVisitableItem ec);
|
||||
|
||||
@Override
|
||||
public T visit(RecentFiles rf) {
|
||||
return defaultVisit(rf);
|
||||
}
|
||||
|
||||
@Override
|
||||
public T visit(RecentFiles.RecentFilesFilter rff) {
|
||||
return defaultVisit(rff);
|
||||
}
|
||||
|
||||
@Override
|
||||
public T visit(KeywordHits kh) {
|
||||
return defaultVisit(kh);
|
||||
}
|
||||
|
||||
@Override
|
||||
public T visit(HashsetHits hh) {
|
||||
return defaultVisit(hh);
|
||||
}
|
||||
|
||||
@Override
|
||||
public T visit(InterestingHits ih) {
|
||||
return defaultVisit(ih);
|
||||
}
|
||||
|
||||
@Override
|
||||
public T visit(EmailExtracted ee) {
|
||||
return defaultVisit(ee);
|
||||
}
|
||||
|
||||
@Override
|
||||
public T visit(Tags tagsNodeKey) {
|
||||
return defaultVisit(tagsNodeKey);
|
||||
|
@@ -144,30 +144,6 @@ ImageNode.createSheet.type.name=Type
|
||||
ImageNode.createSheet.type.text=Image
|
||||
ImageNode.getActions.openFileSearchByAttr.text=Open File Search by Attributes
|
||||
KeyValueNode.menuItemText.viewFileInDir=View Source File in Directory
|
||||
KeywordHits.createNodeForKey.accessTime.desc=Access Time
|
||||
KeywordHits.createNodeForKey.accessTime.displayName=Access Time
|
||||
KeywordHits.createNodeForKey.accessTime.name=AccessTime
|
||||
KeywordHits.createNodeForKey.chgTime.desc=Change Time
|
||||
KeywordHits.createNodeForKey.chgTime.displayName=Change Time
|
||||
KeywordHits.createNodeForKey.chgTime.name=ChangeTime
|
||||
KeywordHits.createNodeForKey.modTime.desc=Modified Time
|
||||
KeywordHits.createNodeForKey.modTime.displayName=Modified Time
|
||||
KeywordHits.createNodeForKey.modTime.name=ModifiedTime
|
||||
KeywordHits.createSheet.filesWithHits.desc=no description
|
||||
KeywordHits.createSheet.filesWithHits.displayName=Files with Hits
|
||||
KeywordHits.createSheet.filesWithHits.name=Files with Hits
|
||||
KeywordHits.createSheet.listName.desc=no description
|
||||
KeywordHits.createSheet.listName.displayName=List Name
|
||||
KeywordHits.createSheet.listName.name=List Name
|
||||
KeywordHits.createSheet.name.desc=no description
|
||||
KeywordHits.createSheet.name.displayName=Name
|
||||
KeywordHits.createSheet.name.name=Name
|
||||
KeywordHits.createSheet.numChildren.desc=no description
|
||||
KeywordHits.createSheet.numChildren.displayName=Number of Children
|
||||
KeywordHits.createSheet.numChildren.name=Number of Children
|
||||
KeywordHits.kwHits.text=Keyword Hits
|
||||
KeywordHits.simpleLiteralSearch.text=Single Literal Keyword Search
|
||||
KeywordHits.singleRegexSearch.text=Single Regular Expression Search
|
||||
LayoutFileNode.getActions.viewFileInDir.text=View File in Directory
|
||||
LocalFilesDataSourceNode.createSheet.deviceId.desc=Device ID of the image
|
||||
LocalFilesDataSourceNode.createSheet.deviceId.displayName=Device ID
|
||||
|
@@ -66,38 +66,10 @@ public interface DisplayableItemNodeVisitor<T> {
|
||||
|
||||
T visit(DataSourceGroupingNode dataSourceGroupingNode);
|
||||
|
||||
T visit(RecentFilesNode rfn);
|
||||
|
||||
T visit(RecentFilesFilterNode rffn);
|
||||
|
||||
T visit(BlackboardArtifactNode ban);
|
||||
|
||||
T visit(Artifacts.TypeNode atn);
|
||||
|
||||
T visit(Artifacts.BaseArtifactNode ecn);
|
||||
|
||||
T visit(KeywordHits.RootNode khrn);
|
||||
|
||||
T visit(KeywordHits.ListNode khsn);
|
||||
|
||||
T visit(KeywordHits.TermNode khmln);
|
||||
|
||||
T visit(KeywordHits.RegExpInstanceNode khmln);
|
||||
|
||||
T visit(HashsetHits.RootNode hhrn);
|
||||
|
||||
T visit(HashsetHits.HashsetNameNode hhsn);
|
||||
|
||||
T visit(EmailExtracted.RootNode eern);
|
||||
|
||||
T visit(EmailExtracted.AccountNode eean);
|
||||
|
||||
T visit(EmailExtracted.FolderNode eefn);
|
||||
|
||||
T visit(InterestingHits.RootNode ihrn);
|
||||
|
||||
T visit(InterestingHits.SetNameNode ihsn);
|
||||
|
||||
T visit(CommonAttributeValueNode cavn);
|
||||
|
||||
T visit(CommonAttributeSearchResultRootNode cfn);
|
||||
@@ -247,11 +219,6 @@ public interface DisplayableItemNodeVisitor<T> {
|
||||
return defaultVisit(ban);
|
||||
}
|
||||
|
||||
@Override
|
||||
public T visit(Artifacts.TypeNode atn) {
|
||||
return defaultVisit(atn);
|
||||
}
|
||||
|
||||
@Override
|
||||
public T visit(Artifacts.BaseArtifactNode ecn) {
|
||||
return defaultVisit(ecn);
|
||||
@@ -262,36 +229,6 @@ public interface DisplayableItemNodeVisitor<T> {
|
||||
return defaultVisit(ftByMimeTypeEmptyNode);
|
||||
}
|
||||
|
||||
@Override
|
||||
public T visit(RecentFilesNode rfn) {
|
||||
return defaultVisit(rfn);
|
||||
}
|
||||
|
||||
@Override
|
||||
public T visit(RecentFilesFilterNode rffn) {
|
||||
return defaultVisit(rffn);
|
||||
}
|
||||
|
||||
@Override
|
||||
public T visit(KeywordHits.RootNode khrn) {
|
||||
return defaultVisit(khrn);
|
||||
}
|
||||
|
||||
@Override
|
||||
public T visit(KeywordHits.ListNode khsn) {
|
||||
return defaultVisit(khsn);
|
||||
}
|
||||
|
||||
@Override
|
||||
public T visit(KeywordHits.RegExpInstanceNode khsn) {
|
||||
return defaultVisit(khsn);
|
||||
}
|
||||
|
||||
@Override
|
||||
public T visit(KeywordHits.TermNode khmln) {
|
||||
return defaultVisit(khmln);
|
||||
}
|
||||
|
||||
@Override
|
||||
public T visit(ViewsNode vn) {
|
||||
return defaultVisit(vn);
|
||||
@@ -307,41 +244,6 @@ public interface DisplayableItemNodeVisitor<T> {
|
||||
return defaultVisit(in);
|
||||
}
|
||||
|
||||
@Override
|
||||
public T visit(HashsetHits.RootNode hhrn) {
|
||||
return defaultVisit(hhrn);
|
||||
}
|
||||
|
||||
@Override
|
||||
public T visit(HashsetHits.HashsetNameNode hhsn) {
|
||||
return defaultVisit(hhsn);
|
||||
}
|
||||
|
||||
@Override
|
||||
public T visit(InterestingHits.RootNode ihrn) {
|
||||
return defaultVisit(ihrn);
|
||||
}
|
||||
|
||||
@Override
|
||||
public T visit(InterestingHits.SetNameNode ihsn) {
|
||||
return defaultVisit(ihsn);
|
||||
}
|
||||
|
||||
@Override
|
||||
public T visit(EmailExtracted.RootNode eern) {
|
||||
return defaultVisit(eern);
|
||||
}
|
||||
|
||||
@Override
|
||||
public T visit(EmailExtracted.AccountNode eean) {
|
||||
return defaultVisit(eean);
|
||||
}
|
||||
|
||||
@Override
|
||||
public T visit(EmailExtracted.FolderNode eefn) {
|
||||
return defaultVisit(eefn);
|
||||
}
|
||||
|
||||
@Override
|
||||
public T visit(LayoutFileNode lfn) {
|
||||
return defaultVisit(lfn);
|
||||
|
@@ -1,576 +0,0 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2012-2021 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.datamodel;
|
||||
|
||||
import java.beans.PropertyChangeEvent;
|
||||
import java.beans.PropertyChangeListener;
|
||||
import java.sql.ResultSet;
|
||||
import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.EnumSet;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Observable;
|
||||
import java.util.Observer;
|
||||
import java.util.Set;
|
||||
import java.util.logging.Level;
|
||||
import org.openide.nodes.ChildFactory;
|
||||
import org.openide.nodes.Children;
|
||||
import org.openide.nodes.Node;
|
||||
import org.openide.nodes.Sheet;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.openide.util.WeakListeners;
|
||||
import org.openide.util.lookup.Lookups;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.ingest.IngestManager;
|
||||
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.Type.TSK_EMAIL_MSG;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import org.sleuthkit.datamodel.SleuthkitCase;
|
||||
import org.sleuthkit.datamodel.SleuthkitCase.CaseDbQuery;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.autopsy.datamodel.Artifacts.UpdatableCountTypeNode;
|
||||
import org.sleuthkit.datamodel.DataArtifact;
|
||||
|
||||
/**
|
||||
* Support for TSK_EMAIL_MSG nodes and displaying emails in the directory tree.
|
||||
* Email messages are grouped into parent folders, and the folders are grouped
|
||||
* into parent accounts if TSK_PATH is available to define the relationship
|
||||
* structure for every message.
|
||||
*/
|
||||
public class EmailExtracted implements AutopsyVisitableItem {
|
||||
|
||||
private static final String LABEL_NAME = BlackboardArtifact.Type.TSK_EMAIL_MSG.getTypeName();
|
||||
private static final Logger logger = Logger.getLogger(EmailExtracted.class.getName());
|
||||
private static final String MAIL_ACCOUNT = NbBundle.getMessage(EmailExtracted.class, "EmailExtracted.mailAccount.text");
|
||||
private static final String MAIL_FOLDER = NbBundle.getMessage(EmailExtracted.class, "EmailExtracted.mailFolder.text");
|
||||
private static final String MAIL_PATH_SEPARATOR = "/";
|
||||
private static final Set<IngestManager.IngestJobEvent> INGEST_JOB_EVENTS_OF_INTEREST = EnumSet.of(IngestManager.IngestJobEvent.COMPLETED, IngestManager.IngestJobEvent.CANCELLED);
|
||||
private static final Set<IngestManager.IngestModuleEvent> INGEST_MODULE_EVENTS_OF_INTEREST = EnumSet.of(IngestManager.IngestModuleEvent.DATA_ADDED);
|
||||
|
||||
/**
|
||||
* Parse the path of the email msg to get the account name and folder in
|
||||
* which the email is contained.
|
||||
*
|
||||
* @param path - the TSK_PATH to the email msg
|
||||
*
|
||||
* @return a map containing the account and folder in which the email is
* stored
|
||||
*/
|
||||
public static final Map<String, String> parsePath(String path) {
|
||||
Map<String, String> parsed = new HashMap<>();
|
||||
String[] split = path == null ? new String[0] : path.split(MAIL_PATH_SEPARATOR);
|
||||
if (split.length < 4) {
|
||||
parsed.put(MAIL_ACCOUNT, NbBundle.getMessage(EmailExtracted.class, "EmailExtracted.defaultAcct.text"));
|
||||
parsed.put(MAIL_FOLDER, NbBundle.getMessage(EmailExtracted.class, "EmailExtracted.defaultFolder.text"));
|
||||
return parsed;
|
||||
}
|
||||
parsed.put(MAIL_ACCOUNT, split[2]);
|
||||
parsed.put(MAIL_FOLDER, split[3]);
|
||||
return parsed;
|
||||
}
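/*
 * Worked example (hypothetical path): a TSK_PATH of
 * "/mail.pst/user@example.com/Inbox/42" splits into
 * ["", "mail.pst", "user@example.com", "Inbox", "42"], so parsePath() returns
 * account = "user@example.com" (index 2) and folder = "Inbox" (index 3); any
 * path with fewer than four segments falls back to the default account and
 * folder labels.
 */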
|
||||
private SleuthkitCase skCase;
|
||||
private final EmailResults emailResults;
|
||||
private final long filteringDSObjId; // 0 if not filtering/grouping by data source
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*
|
||||
* @param skCase Case DB
|
||||
*/
|
||||
public EmailExtracted(SleuthkitCase skCase) {
|
||||
this(skCase, 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*
|
||||
* @param skCase Case DB
|
||||
* @param objId Object id of the data source
|
||||
*
|
||||
*/
|
||||
public EmailExtracted(SleuthkitCase skCase, long objId) {
|
||||
this.skCase = skCase;
|
||||
this.filteringDSObjId = objId;
|
||||
emailResults = new EmailResults();
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> T accept(AutopsyItemVisitor<T> visitor) {
|
||||
return visitor.visit(this);
|
||||
}
|
||||
|
||||
private final class EmailResults extends Observable {
|
||||
|
||||
// NOTE: the map can be accessed by multiple worker threads and needs to be synchronized
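// Illustrative shape: { "user@example.com" -> { "Inbox" -> [artifactObjId, ...] } }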
|
||||
private final Map<String, Map<String, List<Long>>> accounts = new LinkedHashMap<>();
|
||||
|
||||
EmailResults() {
|
||||
update();
|
||||
}
|
||||
|
||||
public Set<String> getAccounts() {
|
||||
synchronized (accounts) {
|
||||
return accounts.keySet();
|
||||
}
|
||||
}
|
||||
|
||||
public Set<String> getFolders(String account) {
|
||||
synchronized (accounts) {
|
||||
return accounts.get(account).keySet();
|
||||
}
|
||||
}
|
||||
|
||||
public List<Long> getArtifactIds(String account, String folder) {
|
||||
synchronized (accounts) {
|
||||
return accounts.get(account).get(folder);
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
public void update() {
|
||||
// clear cache if no case
|
||||
if (skCase == null) {
|
||||
synchronized (accounts) {
|
||||
accounts.clear();
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// get artifact id and path (if present) of all email artifacts
|
||||
int emailArtifactId = BlackboardArtifact.Type.TSK_EMAIL_MSG.getTypeID();
|
||||
int pathAttrId = BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH.getTypeID();
|
||||
|
||||
String query = "SELECT \n"
|
||||
+ " art.artifact_obj_id AS artifact_obj_id,\n"
|
||||
+ " (SELECT value_text FROM blackboard_attributes attr\n"
|
||||
+ " WHERE attr.artifact_id = art.artifact_id AND attr.attribute_type_id = " + pathAttrId + "\n"
|
||||
+ " LIMIT 1) AS value_text\n"
|
||||
+ "FROM \n"
|
||||
+ " blackboard_artifacts art\n"
|
||||
+ " WHERE art.artifact_type_id = " + emailArtifactId + "\n"
|
||||
+ ((filteringDSObjId > 0) ? " AND art.data_source_obj_id = " + filteringDSObjId : "");
|
||||
|
||||
// form hierarchy of account -> folder -> artifact object id
|
||||
Map<String, Map<String, List<Long>>> newMapping = new HashMap<>();
|
||||
|
||||
try (CaseDbQuery dbQuery = skCase.executeQuery(query)) {
|
||||
ResultSet resultSet = dbQuery.getResultSet();
|
||||
while (resultSet.next()) {
|
||||
Long artifactObjId = resultSet.getLong("artifact_obj_id");
|
||||
Map<String, String> accountFolderMap = parsePath(resultSet.getString("value_text"));
|
||||
String account = accountFolderMap.get(MAIL_ACCOUNT);
|
||||
String folder = accountFolderMap.get(MAIL_FOLDER);
|
||||
|
||||
Map<String, List<Long>> folders = newMapping.computeIfAbsent(account, (str) -> new LinkedHashMap<>());
|
||||
List<Long> messages = folders.computeIfAbsent(folder, (str) -> new ArrayList<>());
|
||||
messages.add(artifactObjId);
|
||||
}
|
||||
} catch (TskCoreException | SQLException ex) {
|
||||
logger.log(Level.WARNING, "Cannot initialize email extraction: ", ex); //NON-NLS
|
||||
}
|
||||
|
||||
synchronized (accounts) {
|
||||
accounts.clear();
|
||||
accounts.putAll(newMapping);
|
||||
}
|
||||
|
||||
setChanged();
|
||||
notifyObservers();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Mail root node grouping all mail accounts; supports the account -> folder
* structure.
|
||||
*/
|
||||
public class RootNode extends UpdatableCountTypeNode {
|
||||
|
||||
public RootNode() {
|
||||
super(Children.create(new AccountFactory(), true),
|
||||
Lookups.singleton(TSK_EMAIL_MSG.getDisplayName()),
|
||||
TSK_EMAIL_MSG.getDisplayName(),
|
||||
filteringDSObjId,
|
||||
TSK_EMAIL_MSG);
|
||||
//super(Children.create(new AccountFactory(), true), Lookups.singleton(DISPLAY_NAME));
|
||||
super.setName(LABEL_NAME);
|
||||
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/mail-icon-16.png"); //NON-NLS
|
||||
emailResults.update();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isLeafTypeNode() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
|
||||
return visitor.visit(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Sheet createSheet() {
|
||||
Sheet sheet = super.createSheet();
|
||||
Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
|
||||
if (sheetSet == null) {
|
||||
sheetSet = Sheet.createPropertiesSet();
|
||||
sheet.put(sheetSet);
|
||||
}
|
||||
|
||||
sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "EmailExtracted.createSheet.name.name"),
|
||||
NbBundle.getMessage(this.getClass(), "EmailExtracted.createSheet.name.displayName"),
|
||||
NbBundle.getMessage(this.getClass(), "EmailExtracted.createSheet.name.desc"),
|
||||
getName()));
|
||||
|
||||
return sheet;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getItemType() {
|
||||
return getClass().getName();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Mail root child factory creating each account node
|
||||
*/
|
||||
private class AccountFactory extends ChildFactory.Detachable<String> implements Observer {
|
||||
|
||||
/*
|
||||
* The pcl is in the class because it has the easiest mechanisms to add
|
||||
* and remove itself during its life cycle.
|
||||
*/
|
||||
private final PropertyChangeListener pcl = new PropertyChangeListener() {
|
||||
@Override
|
||||
public void propertyChange(PropertyChangeEvent evt) {
|
||||
String eventType = evt.getPropertyName();
|
||||
if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) {
|
||||
/**
|
||||
* Checking for a current case is a stop gap measure until a
|
||||
* different way of handling the closing of cases is worked
|
||||
* out. Currently, remote events may be received for a case
|
||||
* that is already closed.
|
||||
*/
|
||||
try {
|
||||
Case.getCurrentCaseThrows();
|
||||
/**
|
||||
* Even with the check above, it is still possible that
|
||||
* the case will be closed in a different thread before
|
||||
* this code executes. If that happens, it is possible
|
||||
* for the event to have a null oldValue.
|
||||
*/
|
||||
ModuleDataEvent eventData = (ModuleDataEvent) evt.getOldValue();
|
||||
if (null != eventData && eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.Type.TSK_EMAIL_MSG.getTypeID()) {
|
||||
emailResults.update();
|
||||
}
|
||||
} catch (NoCurrentCaseException notUsed) {
|
||||
/**
|
||||
* Case is closed, do nothing.
|
||||
*/
|
||||
}
|
||||
} else if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString())
|
||||
|| eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) {
|
||||
/**
|
||||
* Checking for a current case is a stop gap measure until a
|
||||
* different way of handling the closing of cases is worked
|
||||
* out. Currently, remote events may be received for a case
|
||||
* that is already closed.
|
||||
*/
|
||||
try {
|
||||
Case.getCurrentCaseThrows();
|
||||
emailResults.update();
|
||||
} catch (NoCurrentCaseException notUsed) {
|
||||
/**
|
||||
* Case is closed, do nothing.
|
||||
*/
|
||||
}
|
||||
} else if (eventType.equals(Case.Events.CURRENT_CASE.toString())) {
|
||||
// case was closed. Remove listeners so that we don't get called with a stale case handle
|
||||
if (evt.getNewValue() == null) {
|
||||
removeNotify();
|
||||
skCase = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
private final PropertyChangeListener weakPcl = WeakListeners.propertyChange(pcl, null);
|
||||
|
||||
@Override
|
||||
protected void addNotify() {
|
||||
IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, weakPcl);
|
||||
IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS_OF_INTEREST, weakPcl);
|
||||
Case.addEventTypeSubscriber(EnumSet.of(Case.Events.CURRENT_CASE), weakPcl);
|
||||
emailResults.update();
|
||||
emailResults.addObserver(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void finalize() throws Throwable {
|
||||
super.finalize();
|
||||
IngestManager.getInstance().removeIngestJobEventListener(weakPcl);
|
||||
IngestManager.getInstance().removeIngestModuleEventListener(weakPcl);
|
||||
Case.removeEventTypeSubscriber(EnumSet.of(Case.Events.CURRENT_CASE), weakPcl);
|
||||
emailResults.deleteObserver(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean createKeys(List<String> list) {
|
||||
list.addAll(emailResults.getAccounts());
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Node createNodeForKey(String key) {
|
||||
return new AccountNode(key);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void update(Observable o, Object arg) {
|
||||
refresh(true);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Account node representation
|
||||
*/
|
||||
public class AccountNode extends DisplayableItemNode implements Observer {
|
||||
|
||||
private final String accountName;
|
||||
|
||||
public AccountNode(String accountName) {
|
||||
super(Children.create(new FolderFactory(accountName), true), Lookups.singleton(accountName));
|
||||
super.setName(accountName);
|
||||
this.accountName = accountName;
|
||||
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/account-icon-16.png"); //NON-NLS
|
||||
updateDisplayName();
|
||||
emailResults.addObserver(this);
|
||||
}
|
||||
|
||||
private void updateDisplayName() {
|
||||
super.setDisplayName(accountName + " (" + emailResults.getFolders(accountName).size() + ")");
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Sheet createSheet() {
|
||||
Sheet sheet = super.createSheet();
|
||||
Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
|
||||
if (sheetSet == null) {
|
||||
sheetSet = Sheet.createPropertiesSet();
|
||||
sheet.put(sheetSet);
|
||||
}
|
||||
|
||||
sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "EmailExtracted.createSheet.name.name"),
|
||||
NbBundle.getMessage(this.getClass(), "EmailExtracted.createSheet.name.displayName"),
|
||||
NbBundle.getMessage(this.getClass(), "EmailExtracted.createSheet.name.desc"),
|
||||
getName()));
|
||||
|
||||
return sheet;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isLeafTypeNode() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
|
||||
return visitor.visit(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void update(Observable o, Object arg) {
|
||||
updateDisplayName();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getItemType() {
|
||||
return getClass().getName();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Account node child factory creating sub nodes for every folder
|
||||
*/
|
||||
private class FolderFactory extends ChildFactory<String> implements Observer {
|
||||
|
||||
private final String accountName;
|
||||
|
||||
private FolderFactory(String accountName) {
|
||||
super();
|
||||
this.accountName = accountName;
|
||||
emailResults.addObserver(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean createKeys(List<String> list) {
|
||||
list.addAll(emailResults.getFolders(accountName));
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Node createNodeForKey(String folderName) {
|
||||
return new FolderNode(accountName, folderName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void update(Observable o, Object arg) {
|
||||
refresh(true);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds a key shared by the parent node and its child factory, so that the
* BaseChildFactory registered listener node name
|
||||
* (BaseChildFactory.register and DataResultViewerTable.setNode with event
|
||||
* registration) is the same as the factory name that will post events from
|
||||
* BaseChildFactory.post called in BaseChildFactory.makeKeys. See JIRA-7752
|
||||
* for more details.
|
||||
*
|
||||
* @param accountName The account name.
|
||||
* @param folderName The folder name.
|
||||
*
|
||||
* @return The generated key.
|
||||
*/
|
||||
private static String getFolderKey(String accountName, String folderName) {
|
||||
return accountName + "_" + folderName;
|
||||
}
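// For example (hypothetical values), getFolderKey("user@example.com", "Inbox")
// yields "user@example.com_Inbox". FolderNode.setName() and the MessageFactory
// constructor below both use this same key, keeping the registered listener
// node name and the posting factory name in agreement.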
|
||||
|
||||
/**
|
||||
* Node representing mail folder
|
||||
*/
|
||||
public class FolderNode extends DisplayableItemNode implements Observer {
|
||||
|
||||
private final String accountName;
|
||||
private final String folderName;
|
||||
|
||||
public FolderNode(String accountName, String folderName) {
|
||||
super(Children.create(new MessageFactory(accountName, folderName), true), Lookups.singleton(accountName));
|
||||
super.setName(getFolderKey(accountName, folderName));
|
||||
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/folder-icon-16.png"); //NON-NLS
|
||||
this.accountName = accountName;
|
||||
this.folderName = folderName;
|
||||
updateDisplayName();
|
||||
emailResults.addObserver(this);
|
||||
}
|
||||
|
||||
private void updateDisplayName() {
|
||||
super.setDisplayName(folderName + " (" + emailResults.getArtifactIds(accountName, folderName).size() + ")");
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isLeafTypeNode() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Sheet createSheet() {
|
||||
Sheet sheet = super.createSheet();
|
||||
Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
|
||||
if (sheetSet == null) {
|
||||
sheetSet = Sheet.createPropertiesSet();
|
||||
sheet.put(sheetSet);
|
||||
}
|
||||
|
||||
sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "EmailExtracted.createSheet.name.name"),
|
||||
NbBundle.getMessage(this.getClass(), "EmailExtracted.createSheet.name.displayName"),
|
||||
NbBundle.getMessage(this.getClass(), "EmailExtracted.createSheet.name.desc"),
|
||||
getName()));
|
||||
|
||||
return sheet;
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
|
||||
return visitor.visit(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void update(Observable o, Object arg) {
|
||||
updateDisplayName();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getItemType() {
|
||||
return getClass().getName();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Child factory creating nodes for a mail folder's content (mail messages)
|
||||
*/
|
||||
private class MessageFactory extends BaseChildFactory<DataArtifact> implements Observer {
|
||||
|
||||
private final String accountName;
|
||||
private final String folderName;
|
||||
|
||||
private MessageFactory(String accountName, String folderName) {
|
||||
super(getFolderKey(accountName, folderName));
|
||||
this.accountName = accountName;
|
||||
this.folderName = folderName;
|
||||
emailResults.addObserver(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Node createNodeForKey(DataArtifact art) {
|
||||
return new BlackboardArtifactNode(art);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void update(Observable o, Object arg) {
|
||||
refresh(true);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected List<DataArtifact> makeKeys() {
|
||||
List<DataArtifact> keys = new ArrayList<>();
|
||||
|
||||
if (skCase != null) {
|
||||
emailResults.getArtifactIds(accountName, folderName).forEach((id) -> {
|
||||
try {
|
||||
DataArtifact art = skCase.getBlackboard().getDataArtifactById(id);
|
||||
//Cache attributes while we are off the EDT.
|
||||
//See JIRA-5969
|
||||
art.getAttributes();
|
||||
keys.add(art);
|
||||
} catch (TskCoreException ex) {
|
||||
logger.log(Level.WARNING, "Error getting mail messages keys", ex); //NON-NLS
|
||||
}
|
||||
});
|
||||
}
|
||||
return keys;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void onAdd() {
|
||||
// No-op
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void onRemove() {
|
||||
// No-op
|
||||
}
|
||||
}
|
||||
}
|
@@ -1,449 +0,0 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2011-2021 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.datamodel;
|
||||
|
||||
import java.beans.PropertyChangeEvent;
|
||||
import java.beans.PropertyChangeListener;
|
||||
import java.sql.ResultSet;
|
||||
import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.EnumSet;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Observable;
|
||||
import java.util.Observer;
|
||||
import java.util.Set;
|
||||
import java.util.logging.Level;
|
||||
import org.openide.nodes.ChildFactory;
|
||||
import org.openide.nodes.Children;
|
||||
import org.openide.nodes.Node;
|
||||
import org.openide.nodes.Sheet;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.openide.util.WeakListeners;
|
||||
import org.openide.util.lookup.Lookups;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.ingest.IngestManager;
|
||||
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.Type.TSK_HASHSET_HIT;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
|
||||
import org.sleuthkit.datamodel.SleuthkitCase;
|
||||
import org.sleuthkit.datamodel.SleuthkitCase.CaseDbQuery;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.autopsy.datamodel.Artifacts.UpdatableCountTypeNode;
|
||||
import org.sleuthkit.autopsy.mainui.datamodel.HashHitSearchParam;
|
||||
import org.sleuthkit.autopsy.corecomponents.SelectionResponder;
|
||||
|
||||
/**
|
||||
* Hash set hits node support. Inner classes have all of the nodes in the tree.
|
||||
*/
|
||||
public class HashsetHits implements AutopsyVisitableItem {
|
||||
|
||||
private static final String HASHSET_HITS = BlackboardArtifact.Type.TSK_HASHSET_HIT.getTypeName();
|
||||
private static final String DISPLAY_NAME = BlackboardArtifact.Type.TSK_HASHSET_HIT.getDisplayName();
|
||||
private static final Logger logger = Logger.getLogger(HashsetHits.class.getName());
|
||||
private static final Set<IngestManager.IngestJobEvent> INGEST_JOB_EVENTS_OF_INTEREST = EnumSet.of(IngestManager.IngestJobEvent.COMPLETED, IngestManager.IngestJobEvent.CANCELLED);
|
||||
private static final Set<IngestManager.IngestModuleEvent> INGEST_MODULE_EVENTS_OF_INTEREST = EnumSet.of(IngestManager.IngestModuleEvent.DATA_ADDED);
|
||||
private SleuthkitCase skCase;
|
||||
private final HashsetResults hashsetResults;
|
||||
private final long filteringDSObjId; // 0 if not filtering/grouping by data source
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*
|
||||
* @param skCase Case DB
|
||||
*
|
||||
*/
|
||||
public HashsetHits(SleuthkitCase skCase) {
|
||||
this(skCase, 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*
|
||||
* @param skCase Case DB
|
||||
* @param objId Object id of the data source
|
||||
*
|
||||
*/
|
||||
public HashsetHits(SleuthkitCase skCase, long objId) {
|
||||
this.skCase = skCase;
|
||||
this.filteringDSObjId = objId;
|
||||
hashsetResults = new HashsetResults();
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> T accept(AutopsyItemVisitor<T> visitor) {
|
||||
return visitor.visit(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Stores all of the hashset results in a single class that is observable
|
||||
* for the child nodes
|
||||
*/
|
||||
private class HashsetResults extends Observable {
|
||||
|
||||
// maps hashset name to the set of artifact ids for that hash set
|
||||
// NOTE: the map can be accessed by multiple worker threads and needs to be synchronized
|
||||
private final Map<String, Set<Long>> hashSetHitsMap = new LinkedHashMap<>();
|
||||
|
||||
HashsetResults() {
|
||||
update();
|
||||
}
|
||||
|
||||
List<String> getSetNames() {
|
||||
List<String> names;
|
||||
synchronized (hashSetHitsMap) {
|
||||
names = new ArrayList<>(hashSetHitsMap.keySet());
|
||||
}
|
||||
Collections.sort(names);
|
||||
return names;
|
||||
}
|
||||
|
||||
Set<Long> getArtifactIds(String hashSetName) {
|
||||
synchronized (hashSetHitsMap) {
|
||||
return hashSetHitsMap.get(hashSetName);
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
final void update() {
|
||||
synchronized (hashSetHitsMap) {
|
||||
hashSetHitsMap.clear();
|
||||
}
|
||||
|
||||
if (skCase == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
int setNameId = ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID();
|
||||
int artId = TSK_HASHSET_HIT.getTypeID();
|
||||
String query = "SELECT value_text,blackboard_artifacts.artifact_obj_id,attribute_type_id " //NON-NLS
|
||||
+ "FROM blackboard_attributes,blackboard_artifacts WHERE " //NON-NLS
|
||||
+ "attribute_type_id=" + setNameId //NON-NLS
|
||||
+ " AND blackboard_attributes.artifact_id=blackboard_artifacts.artifact_id" //NON-NLS
|
||||
+ " AND blackboard_artifacts.artifact_type_id=" + artId; //NON-NLS
|
||||
if (filteringDSObjId > 0) {
|
||||
query += " AND blackboard_artifacts.data_source_obj_id = " + filteringDSObjId;
|
||||
}
|
||||
|
||||
try (CaseDbQuery dbQuery = skCase.executeQuery(query)) {
|
||||
ResultSet resultSet = dbQuery.getResultSet();
|
||||
synchronized (hashSetHitsMap) {
|
||||
while (resultSet.next()) {
|
||||
String setName = resultSet.getString("value_text"); //NON-NLS
|
||||
long artifactObjId = resultSet.getLong("artifact_obj_id"); //NON-NLS
|
||||
if (!hashSetHitsMap.containsKey(setName)) {
|
||||
hashSetHitsMap.put(setName, new HashSet<>());
|
||||
}
|
||||
hashSetHitsMap.get(setName).add(artifactObjId);
|
||||
}
|
||||
}
|
||||
} catch (TskCoreException | SQLException ex) {
|
||||
logger.log(Level.WARNING, "SQL Exception occurred: ", ex); //NON-NLS
|
||||
}
|
||||
|
||||
setChanged();
|
||||
notifyObservers();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Top-level node for all hash sets
|
||||
*/
|
||||
public class RootNode extends UpdatableCountTypeNode {
|
||||
|
||||
public RootNode() {
|
||||
super(Children.create(new HashsetNameFactory(), true),
|
||||
Lookups.singleton(DISPLAY_NAME),
|
||||
DISPLAY_NAME,
|
||||
filteringDSObjId,
|
||||
TSK_HASHSET_HIT);
|
||||
|
||||
super.setName(HASHSET_HITS);
|
||||
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/hashset_hits.png"); //NON-NLS
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isLeafTypeNode() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
|
||||
return visitor.visit(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Sheet createSheet() {
|
||||
Sheet sheet = super.createSheet();
|
||||
Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
|
||||
if (sheetSet == null) {
|
||||
sheetSet = Sheet.createPropertiesSet();
|
||||
sheet.put(sheetSet);
|
||||
}
|
||||
|
||||
sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "HashsetHits.createSheet.name.name"),
|
||||
NbBundle.getMessage(this.getClass(), "HashsetHits.createSheet.name.displayName"),
|
||||
NbBundle.getMessage(this.getClass(), "HashsetHits.createSheet.name.desc"),
|
||||
getName()));
|
||||
|
||||
return sheet;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getItemType() {
|
||||
return getClass().getName();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates child nodes for each hashset name
|
||||
*/
|
||||
private class HashsetNameFactory extends ChildFactory.Detachable<String> implements Observer {
|
||||
|
||||
/*
|
||||
* This should probably be in the HashsetHits class, but the factory has
|
||||
* nice methods for its startup and shutdown, so it seemed like a
|
||||
* cleaner place to register the property change listener.
|
||||
*/
|
||||
private final PropertyChangeListener pcl = new PropertyChangeListener() {
|
||||
@Override
|
||||
public void propertyChange(PropertyChangeEvent evt) {
|
||||
String eventType = evt.getPropertyName();
|
||||
if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) {
|
||||
/**
|
||||
* Checking for a current case is a stop gap measure until a
|
||||
* different way of handling the closing of cases is worked
|
||||
* out. Currently, remote events may be received for a case
|
||||
* that is already closed.
|
||||
*/
|
||||
try {
|
||||
Case.getCurrentCaseThrows();
|
||||
/**
|
||||
* Due to some unresolved issues with how cases are
|
||||
* closed, it is possible for the event to have a null
|
||||
* oldValue if the event is a remote event.
|
||||
*/
|
||||
ModuleDataEvent eventData = (ModuleDataEvent) evt.getOldValue();
|
||||
if (null != eventData && eventData.getBlackboardArtifactType().getTypeID() == TSK_HASHSET_HIT.getTypeID()) {
|
||||
hashsetResults.update();
|
||||
}
|
||||
} catch (NoCurrentCaseException notUsed) {
|
||||
/**
|
||||
* Case is closed, do nothing.
|
||||
*/
|
||||
}
|
||||
} else if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString())
|
||||
|| eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) {
|
||||
/**
|
||||
* Checking for a current case is a stop gap measure until a
|
||||
* different way of handling the closing of cases is worked
|
||||
* out. Currently, remote events may be received for a case
|
||||
* that is already closed.
|
||||
*/
|
||||
try {
|
||||
Case.getCurrentCaseThrows();
|
||||
hashsetResults.update();
|
||||
} catch (NoCurrentCaseException notUsed) {
|
||||
/**
|
||||
* Case is closed, do nothing.
|
||||
*/
|
||||
}
|
||||
} else if (eventType.equals(Case.Events.CURRENT_CASE.toString())) {
|
||||
// case was closed. Remove listeners so that we don't get called with a stale case handle
|
||||
if (evt.getNewValue() == null) {
|
||||
removeNotify();
|
||||
skCase = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
private final PropertyChangeListener weakPcl = WeakListeners.propertyChange(pcl, null);
|
||||
|
||||
@Override
|
||||
protected void addNotify() {
|
||||
IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, weakPcl);
|
||||
IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS_OF_INTEREST, weakPcl);
|
||||
Case.addEventTypeSubscriber(EnumSet.of(Case.Events.CURRENT_CASE), weakPcl);
|
||||
hashsetResults.update();
|
||||
hashsetResults.addObserver(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void finalize() throws Throwable {
|
||||
super.finalize();
|
||||
IngestManager.getInstance().removeIngestJobEventListener(weakPcl);
|
||||
IngestManager.getInstance().removeIngestModuleEventListener(weakPcl);
|
||||
Case.removeEventTypeSubscriber(EnumSet.of(Case.Events.CURRENT_CASE), weakPcl);
|
||||
hashsetResults.deleteObserver(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean createKeys(List<String> list) {
|
||||
list.addAll(hashsetResults.getSetNames());
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Node createNodeForKey(String key) {
|
||||
return new HashsetNameNode(key);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void update(Observable o, Object arg) {
|
||||
refresh(true);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Node for a hash set name
|
||||
*/
|
||||
public class HashsetNameNode extends DisplayableItemNode implements Observer, SelectionResponder {
|
||||
|
||||
private final String hashSetName;
|
||||
|
||||
public HashsetNameNode(String hashSetName) {
|
||||
super(Children.LEAF,
|
||||
Lookups.fixed(hashSetName));
|
||||
super.setName(hashSetName);
|
||||
this.hashSetName = hashSetName;
|
||||
updateDisplayName();
|
||||
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/hashset_hits.png"); //NON-NLS
|
||||
hashsetResults.addObserver(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void respondSelection(DataResultTopComponent dataResultPanel) {
|
||||
dataResultPanel.displayAnalysisResultSet(new HashHitSearchParam(
|
||||
filteringDSObjId > 0 ? filteringDSObjId : null,
|
||||
hashSetName));
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the count in the display name
|
||||
*/
|
||||
private void updateDisplayName() {
|
||||
super.setDisplayName(hashSetName + " (" + hashsetResults.getArtifactIds(hashSetName).size() + ")");
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isLeafTypeNode() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Sheet createSheet() {
|
||||
Sheet sheet = super.createSheet();
|
||||
Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
|
||||
if (sheetSet == null) {
|
||||
sheetSet = Sheet.createPropertiesSet();
|
||||
sheet.put(sheetSet);
|
||||
}
|
||||
|
||||
sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "HashsetHits.createSheet.name.name"),
|
||||
NbBundle.getMessage(this.getClass(), "HashsetHits.createSheet.name.displayName"),
|
||||
NbBundle.getMessage(this.getClass(), "HashsetHits.createSheet.name.desc"),
|
||||
getName()));
|
||||
|
||||
return sheet;
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
|
||||
return visitor.visit(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void update(Observable o, Object arg) {
|
||||
updateDisplayName();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getItemType() {
|
||||
/**
|
||||
* For custom settings for each hash set, return
|
||||
* getClass().getName() + hashSetName instead.
|
||||
*/
|
||||
return getClass().getName();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates the nodes for the hits in a given set.
|
||||
*/
|
||||
// private class HitFactory extends BaseChildFactory<AnalysisResult> implements Observer {
|
||||
//
|
||||
// private final String hashsetName;
|
||||
// private final Map<Long, AnalysisResult> artifactHits = new HashMap<>();
|
||||
//
|
||||
// private HitFactory(String hashsetName) {
|
||||
// super(hashsetName);
|
||||
// this.hashsetName = hashsetName;
|
||||
// }
|
||||
//
|
||||
// @Override
|
||||
// protected void onAdd() {
|
||||
// hashsetResults.addObserver(this);
|
||||
// }
|
||||
//
|
||||
// @Override
|
||||
// protected void onRemove() {
|
||||
// hashsetResults.deleteObserver(this);
|
||||
// }
|
||||
//
|
||||
// @Override
|
||||
// protected Node createNodeForKey(AnalysisResult key) {
|
||||
// return new BlackboardArtifactNode(key);
|
||||
// }
|
||||
//
|
||||
// @Override
|
||||
// public void update(Observable o, Object arg) {
|
||||
// refresh(true);
|
||||
// }
|
||||
//
|
||||
// @Override
|
||||
// protected List<AnalysisResult> makeKeys() {
|
||||
// if (skCase != null) {
|
||||
//
|
||||
// hashsetResults.getArtifactIds(hashsetName).forEach((id) -> {
|
||||
// try {
|
||||
// if (!artifactHits.containsKey(id)) {
|
||||
// AnalysisResult art = skCase.getBlackboard().getAnalysisResultById(id);
|
||||
// //Cache attributes while we are off the EDT.
|
||||
// //See JIRA-5969
|
||||
// art.getAttributes();
|
||||
// artifactHits.put(id, art);
|
||||
// }
|
||||
// } catch (TskCoreException ex) {
|
||||
// logger.log(Level.SEVERE, "TSK Exception occurred", ex); //NON-NLS
|
||||
// }
|
||||
// });
|
||||
// return new ArrayList<>(artifactHits.values());
|
||||
// }
|
||||
// return Collections.emptyList();
|
||||
// }
|
||||
// }
|
||||
}
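A hedged usage sketch (dataSourceObjId is an assumed variable) of how the HashsetHits subtree defined above is typically instantiated:

    // Build the hash set hits subtree for the current case, optionally scoped
    // to one data source (pass 0 to include all data sources).
    SleuthkitCase caseDb = Case.getCurrentCaseThrows().getSleuthkitCase();
    HashsetHits hashsetHits = new HashsetHits(caseDb, dataSourceObjId);
    Node hashsetRoot = hashsetHits.new RootNode();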
@@ -1,477 +0,0 @@
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2011-2021 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.datamodel;
|
||||
|
||||
import java.beans.PropertyChangeEvent;
|
||||
import java.beans.PropertyChangeListener;
|
||||
import java.sql.ResultSet;
|
||||
import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.EnumSet;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Observable;
|
||||
import java.util.Observer;
|
||||
import java.util.Set;
|
||||
import java.util.logging.Level;
|
||||
import org.openide.nodes.ChildFactory;
|
||||
import org.openide.nodes.Children;
|
||||
import org.openide.nodes.Node;
|
||||
import org.openide.nodes.Sheet;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.openide.util.WeakListeners;
|
||||
import org.openide.util.lookup.Lookups;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.ingest.IngestManager;
|
||||
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import org.sleuthkit.datamodel.SleuthkitCase;
|
||||
import org.sleuthkit.datamodel.SleuthkitCase.CaseDbQuery;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.datamodel.AnalysisResult;
|
||||
import org.sleuthkit.autopsy.datamodel.Artifacts.UpdatableCountTypeNode;
|
||||
|
||||
public class InterestingHits implements AutopsyVisitableItem {
|
||||
|
||||
private static final Logger logger = Logger.getLogger(InterestingHits.class.getName());
|
||||
private static final Set<IngestManager.IngestJobEvent> INGEST_JOB_EVENTS_OF_INTEREST = EnumSet.of(IngestManager.IngestJobEvent.COMPLETED, IngestManager.IngestJobEvent.CANCELLED);
|
||||
private static final Set<IngestManager.IngestModuleEvent> INGEST_MODULE_EVENTS_OF_INTEREST = EnumSet.of(IngestManager.IngestModuleEvent.DATA_ADDED);
|
||||
|
||||
private SleuthkitCase skCase;
|
||||
private final InterestingResults interestingResults = new InterestingResults();
|
||||
private final long filteringDSObjId; // 0 if not filtering/grouping by data source
|
||||
private final BlackboardArtifact.Type artifactType;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*
|
||||
* @param skCase Case DB
|
||||
* @param artifactType The artifact type (either interesting file or
|
||||
* artifact).
|
||||
*
|
||||
*/
|
||||
public InterestingHits(SleuthkitCase skCase, BlackboardArtifact.Type artifactType) {
|
||||
this(skCase, artifactType, 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*
|
||||
* @param skCase Case DB
|
||||
* @param artifactType The artifact type (either interesting file or
|
||||
* artifact).
|
||||
* @param objId Object id of the data source
|
||||
*
|
||||
*/
|
||||
public InterestingHits(SleuthkitCase skCase, BlackboardArtifact.Type artifactType, long objId) {
|
||||
this.skCase = skCase;
|
||||
this.artifactType = artifactType;
|
||||
this.filteringDSObjId = objId;
|
||||
interestingResults.update();
|
||||
}
|
||||
|
||||
/**
|
||||
* Cache of result ids for the configured artifact type, mapped by set name.
|
||||
*/
|
||||
private class InterestingResults extends Observable {
|
||||
|
||||
// NOTE: the map can be accessed by multiple worker threads and needs to be synchronized
|
||||
private final Map<String, Set<Long>> interestingItemsMap = new LinkedHashMap<>();
|
||||
|
||||
/**
|
||||
* Returns all the set names for this interesting item type.
|
||||
*
|
||||
*
|
||||
* @return The set names.
|
||||
*/
|
||||
List<String> getSetNames() {
|
||||
List<String> setNames;
|
||||
synchronized (interestingItemsMap) {
|
||||
setNames = new ArrayList<>(interestingItemsMap.keySet());
|
||||
}
|
||||
Collections.sort(setNames, (a, b) -> a.compareToIgnoreCase(b));
|
||||
return setNames;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns all artifact ids belonging to the specified set name for this
* interesting item type.
|
||||
*
|
||||
* @param setName The set name.
|
||||
*
|
||||
* @return The artifact ids in that set name and type.
|
||||
*/
|
||||
Set<Long> getArtifactIds(String setName) {
|
||||
synchronized (interestingItemsMap) {
|
||||
return new HashSet<>(interestingItemsMap.getOrDefault(setName, Collections.emptySet()));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Triggers a fetch from the database to update this cache.
|
||||
*/
|
||||
void update() {
|
||||
synchronized (interestingItemsMap) {
|
||||
interestingItemsMap.clear();
|
||||
}
|
||||
loadArtifacts();
|
||||
setChanged();
|
||||
notifyObservers();
|
||||
}
|
||||
|
||||
/*
|
||||
* Reads the artifacts of specified type, grouped by Set, and loads into
|
||||
* the interestingItemsMap
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
private void loadArtifacts() {
|
||||
if (skCase == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
int setNameId = BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID();
|
||||
|
||||
String query = "SELECT value_text, blackboard_artifacts.artifact_obj_id " //NON-NLS
|
||||
+ "FROM blackboard_attributes,blackboard_artifacts WHERE " //NON-NLS
|
||||
+ "attribute_type_id=" + setNameId //NON-NLS
|
||||
+ " AND blackboard_attributes.artifact_id=blackboard_artifacts.artifact_id" //NON-NLS
|
||||
+ " AND blackboard_artifacts.artifact_type_id = " + artifactType.getTypeID(); //NON-NLS
|
||||
if (filteringDSObjId > 0) {
|
||||
query += " AND blackboard_artifacts.data_source_obj_id = " + filteringDSObjId;
|
||||
}
|
||||
|
||||
try (CaseDbQuery dbQuery = skCase.executeQuery(query)) {
|
||||
synchronized (interestingItemsMap) {
|
||||
ResultSet resultSet = dbQuery.getResultSet();
|
||||
while (resultSet.next()) {
|
||||
String value = resultSet.getString("value_text"); //NON-NLS
|
||||
long artifactObjId = resultSet.getLong("artifact_obj_id"); //NON-NLS
|
||||
interestingItemsMap
|
||||
.computeIfAbsent(value, (k) -> new HashSet<>())
|
||||
.add(artifactObjId);
|
||||
}
|
||||
}
|
||||
} catch (TskCoreException | SQLException ex) {
|
||||
logger.log(Level.WARNING, "SQL Exception occurred: ", ex); //NON-NLS
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> T accept(AutopsyItemVisitor<T> visitor) {
|
||||
return visitor.visit(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates nodes for all sets for a specified interesting item type.
|
||||
*/
|
||||
private class SetNameFactory extends ChildFactory.Detachable<String> implements Observer {
|
||||
|
||||
/*
|
||||
* This should probably be in the top-level class, but the factory has
|
||||
* nice methods for its startup and shutdown, so it seemed like a
|
||||
* cleaner place to register the property change listener.
|
||||
*/
|
||||
private final PropertyChangeListener pcl = (PropertyChangeEvent evt) -> {
|
||||
String eventType = evt.getPropertyName();
|
||||
if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) {
|
||||
/**
|
||||
* Checking for a current case is a stop gap measure until a
|
||||
* different way of handling the closing of cases is worked out.
|
||||
* Currently, remote events may be received for a case that is
|
||||
* already closed.
|
||||
*/
|
||||
try {
|
||||
Case.getCurrentCaseThrows();
|
||||
/**
|
||||
* Even with the check above, it is still possible that the
|
||||
* case will be closed in a different thread before this
|
||||
* code executes. If that happens, it is possible for the
|
||||
* event to have a null oldValue.
|
||||
*/
|
||||
ModuleDataEvent eventData = (ModuleDataEvent) evt.getOldValue();
|
||||
if (null != eventData && (eventData.getBlackboardArtifactType().getTypeID() == artifactType.getTypeID())) {
|
||||
interestingResults.update();
|
||||
}
|
||||
} catch (NoCurrentCaseException notUsed) {
|
||||
/**
|
||||
* Case is closed, do nothing.
|
||||
*/
|
||||
}
|
||||
} else if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString())
|
||||
|| eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) {
|
||||
/**
|
||||
* Checking for a current case is a stop gap measure until a
|
||||
* different way of handling the closing of cases is worked out.
|
||||
* Currently, remote events may be received for a case that is
|
||||
* already closed.
|
||||
*/
|
||||
try {
|
||||
Case.getCurrentCaseThrows();
|
||||
interestingResults.update();
|
||||
} catch (NoCurrentCaseException notUsed) {
|
||||
/**
|
||||
* Case is closed, do nothing.
|
||||
*/
|
||||
}
|
||||
} else if (eventType.equals(Case.Events.CURRENT_CASE.toString())) {
|
||||
// case was closed. Remove listeners so that we don't get called with a stale case handle
|
||||
if (evt.getNewValue() == null) {
|
||||
removeNotify();
|
||||
skCase = null;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
private final PropertyChangeListener weakPcl = WeakListeners.propertyChange(pcl, null);
|
||||
|
||||
@Override
|
||||
protected boolean createKeys(List<String> list) {
|
||||
list.addAll(interestingResults.getSetNames());
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Node createNodeForKey(String key) {
|
||||
return new SetNameNode(key);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void update(Observable o, Object arg) {
|
||||
refresh(true);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void addNotify() {
|
||||
IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, weakPcl);
|
||||
IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS_OF_INTEREST, weakPcl);
|
||||
Case.addEventTypeSubscriber(EnumSet.of(Case.Events.CURRENT_CASE), weakPcl);
|
||||
interestingResults.addObserver(this);
|
||||
interestingResults.update();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void finalize() throws Throwable {
|
||||
super.finalize();
|
||||
IngestManager.getInstance().removeIngestJobEventListener(weakPcl);
|
||||
IngestManager.getInstance().removeIngestModuleEventListener(weakPcl);
|
||||
Case.removeEventTypeSubscriber(EnumSet.of(Case.Events.CURRENT_CASE), weakPcl);
|
||||
interestingResults.deleteObserver(this);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A node for a set to be displayed in the tree.
|
||||
*/
|
||||
public class SetNameNode extends DisplayableItemNode implements Observer {
|
||||
|
||||
private final String setName;
|
||||
|
||||
public SetNameNode(String setName) {
|
||||
super(Children.create(new HitFactory(setName), true), Lookups.singleton(setName));
|
||||
this.setName = setName;
|
||||
super.setName(setName);
|
||||
updateDisplayName();
|
||||
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/interesting_item.png"); //NON-NLS
|
||||
interestingResults.addObserver(this);
|
||||
}
|
||||
|
||||
private void updateDisplayName() {
|
||||
int sizeOfSet = interestingResults.getArtifactIds(setName).size();
|
||||
super.setDisplayName(setName + " (" + sizeOfSet + ")");
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isLeafTypeNode() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Sheet createSheet() {
|
||||
Sheet sheet = super.createSheet();
|
||||
Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
|
||||
if (sheetSet == null) {
|
||||
sheetSet = Sheet.createPropertiesSet();
|
||||
sheet.put(sheetSet);
|
||||
}
|
||||
|
||||
sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "InterestingHits.createSheet.name.name"),
|
||||
NbBundle.getMessage(this.getClass(), "InterestingHits.createSheet.name.name"),
|
||||
NbBundle.getMessage(this.getClass(), "InterestingHits.createSheet.name.desc"),
|
||||
getName()));
|
||||
|
||||
return sheet;
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
|
||||
return visitor.visit(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void update(Observable o, Object arg) {
|
||||
updateDisplayName();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getItemType() {
|
||||
/**
|
||||
* For custom settings for each rule set, return
|
||||
* getClass().getName() + setName instead.
|
||||
*/
|
||||
return getClass().getName();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parent node for interesting item type that shows child set nodes.
|
||||
*/
|
||||
public class RootNode extends UpdatableCountTypeNode {
|
||||
|
||||
/**
|
||||
* Main constructor.
|
||||
*/
|
||||
public RootNode() {
|
||||
super(Children.create(new SetNameFactory(), true),
|
||||
Lookups.singleton(artifactType),
|
||||
artifactType.getDisplayName(),
|
||||
filteringDSObjId,
|
||||
artifactType);
|
||||
|
||||
/**
|
||||
* We use the combination of setName and typeName as the name of the
|
||||
* node to ensure that nodes have a unique name. This comes into
|
||||
* play when associating paging state with the node.
|
||||
*/
|
||||
setName(artifactType.getDisplayName());
|
||||
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/interesting_item.png"); //NON-NLS
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isLeafTypeNode() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Sheet createSheet() {
|
||||
Sheet sheet = super.createSheet();
|
||||
Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
|
||||
if (sheetSet == null) {
|
||||
sheetSet = Sheet.createPropertiesSet();
|
||||
sheet.put(sheetSet);
|
||||
}
|
||||
sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "InterestingHits.createSheet.name.name"),
|
||||
NbBundle.getMessage(this.getClass(), "InterestingHits.createSheet.name.name"),
|
||||
NbBundle.getMessage(this.getClass(), "InterestingHits.createSheet.name.desc"),
|
||||
getName()));
|
||||
return sheet;
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
|
||||
return visitor.visit(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getItemType() {
|
||||
/**
|
||||
* For custom settings for each rule set, return
|
||||
* getClass().getName() + setName instead.
|
||||
*/
|
||||
return getClass().getName();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Factory for creating individual interesting item BlackboardArtifactNodes.
|
||||
*/
|
||||
private class HitFactory extends BaseChildFactory<AnalysisResult> implements Observer {
|
||||
|
||||
private final String setName;
|
||||
private final Map<Long, AnalysisResult> artifactHits = new HashMap<>();
|
||||
|
||||
/**
|
||||
* Main constructor.
|
||||
*
|
||||
* @param setName The set name of artifacts to be displayed.
|
||||
*/
|
||||
private HitFactory(String setName) {
|
||||
/**
|
||||
* The node name passed to the parent constructor must be the same
|
||||
* as the name set in the InterestingItemTypeNode constructor, i.e.
|
||||
* setName underscore typeName
|
||||
*/
|
||||
super(setName);
|
||||
this.setName = setName;
|
||||
interestingResults.addObserver(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected List<AnalysisResult> makeKeys() {
|
||||
|
||||
if (skCase != null) {
|
||||
interestingResults.getArtifactIds(setName).forEach((id) -> {
|
||||
try {
|
||||
if (!artifactHits.containsKey(id)) {
|
||||
AnalysisResult art = skCase.getBlackboard().getAnalysisResultById(id);
|
||||
//Cache attributes while we are off the EDT.
|
||||
//See JIRA-5969
|
||||
art.getAttributes();
|
||||
artifactHits.put(id, art);
|
||||
}
|
||||
} catch (TskCoreException ex) {
|
||||
logger.log(Level.SEVERE, "TSK Exception occurred", ex); //NON-NLS
|
||||
}
|
||||
});
|
||||
|
||||
return new ArrayList<>(artifactHits.values());
|
||||
}
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Node createNodeForKey(AnalysisResult art) {
|
||||
return new BlackboardArtifactNode(art);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void update(Observable o, Object arg) {
|
||||
refresh(true);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void onAdd() {
|
||||
// No-op
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void onRemove() {
|
||||
// No-op
|
||||
}
|
||||
}
|
||||
}
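A brief sketch (skCase and dsObjId are assumed to be in scope) of how InterestingHits is parameterized per artifact type, which is why the cache above only needs to key on set name:

    // One instance per interesting item artifact type; each builds its own subtree.
    InterestingHits fileHits = new InterestingHits(skCase, BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT, dsObjId);
    InterestingHits artifactHits = new InterestingHits(skCase, BlackboardArtifact.Type.TSK_INTERESTING_ARTIFACT_HIT, dsObjId);
    Node fileHitsRoot = fileHits.new RootNode();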
@@ -1,980 +0,0 @@
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2011-2021 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.datamodel;
|
||||
|
||||
import java.beans.PropertyChangeEvent;
|
||||
import java.beans.PropertyChangeListener;
|
||||
import java.sql.ResultSet;
|
||||
import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Comparator;
|
||||
import java.util.EnumSet;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Observable;
|
||||
import java.util.Observer;
|
||||
import java.util.Set;
|
||||
import java.util.logging.Level;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.openide.nodes.ChildFactory;
|
||||
import org.openide.nodes.Children;
|
||||
import org.openide.nodes.Node;
|
||||
import org.openide.nodes.Sheet;
|
||||
import org.openide.util.Lookup;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.openide.util.WeakListeners;
|
||||
import org.openide.util.lookup.Lookups;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.coreutils.TimeZoneUtils;
|
||||
import org.sleuthkit.autopsy.ingest.IngestManager;
|
||||
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import org.sleuthkit.datamodel.SleuthkitCase;
|
||||
import org.sleuthkit.datamodel.SleuthkitCase.CaseDbQuery;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.Type.TSK_KEYWORD_HIT;
|
||||
import org.sleuthkit.autopsy.datamodel.Artifacts.UpdatableCountTypeNode;
|
||||
import org.sleuthkit.autopsy.mainui.datamodel.KeywordHitSearchParam;
|
||||
import org.sleuthkit.autopsy.corecomponents.SelectionResponder;
|
||||
import org.sleuthkit.datamodel.AnalysisResult;
|
||||
import org.sleuthkit.datamodel.TskData;
|
||||
|
||||
/**
|
||||
* Keyword hits node support
|
||||
*/
|
||||
public class KeywordHits implements AutopsyVisitableItem {
|
||||
|
||||
private static final Logger logger = Logger.getLogger(KeywordHits.class.getName());
|
||||
private static final Set<IngestManager.IngestJobEvent> INGEST_JOB_EVENTS_OF_INTEREST = EnumSet.of(IngestManager.IngestJobEvent.COMPLETED, IngestManager.IngestJobEvent.CANCELLED);
|
||||
private static final Set<IngestManager.IngestModuleEvent> INGEST_MODULE_EVENTS_OF_INTEREST = EnumSet.of(IngestManager.IngestModuleEvent.DATA_ADDED);
|
||||
@NbBundle.Messages("KeywordHits.kwHits.text=Keyword Hits")
|
||||
private static final String KEYWORD_HITS = Bundle.KeywordHits_kwHits_text();
|
||||
@NbBundle.Messages("KeywordHits.simpleLiteralSearch.text=Single Literal Keyword Search")
|
||||
private static final String SIMPLE_LITERAL_SEARCH = Bundle.KeywordHits_simpleLiteralSearch_text();
|
||||
@NbBundle.Messages("KeywordHits.singleRegexSearch.text=Single Regular Expression Search")
|
||||
private static final String SIMPLE_REGEX_SEARCH = Bundle.KeywordHits_singleRegexSearch_text();
|
||||
|
||||
public static final String NAME = BlackboardArtifact.Type.TSK_KEYWORD_HIT.getTypeName();
|
||||
|
||||
private SleuthkitCase skCase;
|
||||
private final KeywordResults keywordResults;
|
||||
private final long filteringDSObjId; // 0 if not filtering/grouping by data source
|
||||
|
||||
/**
|
||||
* String used in the instance map so that exact matches and substrings can
|
||||
* fit into the same data structure as regexps, even though they don't use
|
||||
* instances.
|
||||
*/
|
||||
private static final String DEFAULT_INSTANCE_NAME = "DEFAULT_INSTANCE_NAME";
|
||||
|
||||
/**
|
||||
* query attributes table for the ones that we need for the tree
|
||||
*/
|
||||
private static final String KEYWORD_HIT_ATTRIBUTES_QUERY = "SELECT blackboard_attributes.value_text, "//NON-NLS
|
||||
+ "blackboard_attributes.value_int32, "//NON-NLS
|
||||
+ "blackboard_artifacts.artifact_obj_id, " //NON-NLS
|
||||
+ "blackboard_attributes.attribute_type_id "//NON-NLS
|
||||
+ "FROM blackboard_attributes, blackboard_artifacts "//NON-NLS
|
||||
+ "WHERE blackboard_attributes.artifact_id = blackboard_artifacts.artifact_id "//NON-NLS
|
||||
+ " AND blackboard_artifacts.artifact_type_id = " + BlackboardArtifact.Type.TSK_KEYWORD_HIT.getTypeID() //NON-NLS
|
||||
+ " AND (attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID()//NON-NLS
|
||||
+ " OR attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD.getTypeID()//NON-NLS
|
||||
+ " OR attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_SEARCH_TYPE.getTypeID()//NON-NLS
|
||||
+ " OR attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP.getTypeID()//NON-NLS
|
||||
+ ")"; //NON-NLS
|
||||
|
||||
static private boolean isOnlyDefaultInstance(List<String> instances) {
|
||||
return (instances.size() == 1) && (instances.get(0).equals(DEFAULT_INSTANCE_NAME));
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*
|
||||
* @param skCase Case DB
|
||||
*/
|
||||
KeywordHits(SleuthkitCase skCase) {
|
||||
this(skCase, 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*
|
||||
* @param skCase Case DB
|
||||
* @param objId Object id of the data source
|
||||
*
|
||||
*/
|
||||
public KeywordHits(SleuthkitCase skCase, long objId) {
|
||||
this.skCase = skCase;
|
||||
this.filteringDSObjId = objId;
|
||||
keywordResults = new KeywordResults();
|
||||
}
|
||||
|
||||
/*
|
||||
* All of these maps and code assume the following: Regexps will have an
|
||||
* 'instance' layer that shows the specific words that matched the regexp
|
||||
* Exact match and substring will not have the instance layer and instead
|
||||
* will have the specific hits below their term.
|
||||
*/
|
||||
private final class KeywordResults extends Observable {
|
||||
|
||||
// Map from listName/Type to Map of keywords/regexp to Map of instance terms to Set of artifact Ids
|
||||
// NOTE: the map can be accessed by multiple worker threads and needs to be synchronized
|
||||
private final Map<String, Map<String, Map<String, Set<Long>>>> topLevelMap = new LinkedHashMap<>();
|
||||
|
||||
KeywordResults() {
|
||||
update();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the list names used in searches.
|
||||
*
|
||||
* @return The list of list names.
|
||||
*/
|
||||
List<String> getListNames() {
|
||||
synchronized (topLevelMap) {
|
||||
List<String> names = new ArrayList<>(topLevelMap.keySet());
|
||||
|
||||
// sort the list names, but ensure that the special lists
|
||||
// stay at the top.
|
||||
Collections.sort(names, new Comparator<String>() {
|
||||
|
||||
@Override
|
||||
public int compare(String o1, String o2) {
|
||||
// ideally, they would not be hard coded, but this module
|
||||
// doesn't know about Keyword Search NBM
|
||||
if (o1.startsWith("Single Literal Keyword Search")) {
|
||||
return -1;
|
||||
} else if (o2.startsWith("Single Literal Keyword Search")) {
|
||||
return 1;
|
||||
} else if (o1.startsWith("Single Regular Expression Search")) {
|
||||
return -1;
|
||||
} else if (o2.startsWith("Single Regular Expression Search")) {
|
||||
return 1;
|
||||
}
|
||||
return o1.compareTo(o2);
|
||||
}
|
||||
});
|
||||
|
||||
return names;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get keywords used in a given list. Will be regexp patterns for
|
||||
* regexps and search term for non-regexps.
|
||||
*
|
||||
* @param listName Keyword list name
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
List<String> getKeywords(String listName) {
|
||||
List<String> keywords;
|
||||
synchronized (topLevelMap) {
|
||||
keywords = new ArrayList<>(topLevelMap.get(listName).keySet());
|
||||
}
|
||||
Collections.sort(keywords);
|
||||
return keywords;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get specific keyword terms that were found for a given list and
|
||||
* keyword combination. For example, a specific phone number for a phone
|
||||
* number regexp. Will be the default instance for non-regexp searches.
|
||||
*
|
||||
* @param listName Keyword list name
|
||||
* @param keyword search term (regexp pattern or exact match term)
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
List<String> getKeywordInstances(String listName, String keyword) {
|
||||
List<String> instances;
|
||||
synchronized (topLevelMap) {
|
||||
instances = new ArrayList<>(topLevelMap.get(listName).get(keyword).keySet());
|
||||
}
|
||||
Collections.sort(instances);
|
||||
return instances;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get artifact ids for a given list, keyword, and instance triple
|
||||
*
|
||||
* @param listName Keyword list name
|
||||
* @param keyword search term (regexp pattern or exact match
|
||||
* term)
|
||||
* @param keywordInstance specific term that matched (or default
|
||||
* instance name)
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
Set<Long> getArtifactIds(String listName, String keyword, String keywordInstance) {
|
||||
synchronized (topLevelMap) {
|
||||
return topLevelMap.get(listName).get(keyword).get(keywordInstance);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a hit for a regexp to the internal data structure.
|
||||
*
|
||||
* @param listMap Maps keywords/regexp to instances to artifact
|
||||
* IDs
|
||||
* @param regExp Regular expression that was used in search
|
||||
* @param keywordInstance Specific term that matched regexp
|
||||
* @param artifactId Artifact id of file that had hit
|
||||
*/
|
||||
void addRegExpToList(Map<String, Map<String, Set<Long>>> listMap, String regExp, String keywordInstance, Long artifactId) {
|
||||
Map<String, Set<Long>> instanceMap = listMap.computeIfAbsent(regExp, r -> new LinkedHashMap<>());
|
||||
// add this ID to the instances entry, creating one if needed
|
||||
instanceMap.computeIfAbsent(keywordInstance, ki -> new HashSet<>()).add(artifactId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a hit for an exact match (or substring) to the internal data
|
||||
* structure.
|
||||
*
|
||||
* @param listMap Maps keywords/regexp to instances to artifact IDs
|
||||
* @param keyWord Term that was hit
|
||||
* @param artifactId Artifact id of file that had hit
|
||||
*/
|
||||
void addNonRegExpMatchToList(Map<String, Map<String, Set<Long>>> listMap, String keyWord, Long artifactId) {
|
||||
Map<String, Set<Long>> instanceMap = listMap.computeIfAbsent(keyWord, k -> new LinkedHashMap<>());
|
||||
|
||||
// Use the default instance name, since we don't need that level in the tree
|
||||
instanceMap.computeIfAbsent(DEFAULT_INSTANCE_NAME, DIN -> new HashSet<>()).add(artifactId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Populate data structure for the tree based on the keyword hit
|
||||
* artifacts
|
||||
*
|
||||
* @param artifactIds Maps Artifact ID to map of attribute types to
|
||||
* attribute values
|
||||
*/
|
||||
void populateTreeMaps(Map<Long, Map<Long, String>> artifactIds) {
|
||||
synchronized (topLevelMap) {
|
||||
topLevelMap.clear();
|
||||
|
||||
// map of list name to keyword to artifact IDs
|
||||
Map<String, Map<String, Map<String, Set<Long>>>> listsMap = new LinkedHashMap<>();
|
||||
|
||||
// Map from literal keyword to instances (which will be empty) to artifact IDs
|
||||
Map<String, Map<String, Set<Long>>> literalMap = new LinkedHashMap<>();
|
||||
|
||||
// Map from regex keyword artifact to instances to artifact IDs
|
||||
Map<String, Map<String, Set<Long>>> regexMap = new LinkedHashMap<>();
|
||||
|
||||
// top-level nodes
|
||||
topLevelMap.put(SIMPLE_LITERAL_SEARCH, literalMap);
|
||||
topLevelMap.put(SIMPLE_REGEX_SEARCH, regexMap);
|
||||
|
||||
for (Map.Entry<Long, Map<Long, String>> art : artifactIds.entrySet()) {
|
||||
long id = art.getKey();
|
||||
Map<Long, String> attributes = art.getValue();
|
||||
|
||||
// Note: attributes.remove(...) would also work here, since each attribute type is read only once.
|
||||
String listName = attributes.get(Long.valueOf(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID()));
|
||||
String word = attributes.get(Long.valueOf(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD.getTypeID()));
|
||||
String reg = attributes.get(Long.valueOf(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP.getTypeID()));
|
||||
String kwType = attributes.get(Long.valueOf(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_SEARCH_TYPE.getTypeID()));
|
||||
|
||||
if (listName != null) { // part of a list
|
||||
// get or create list entry
|
||||
Map<String, Map<String, Set<Long>>> listMap = listsMap.computeIfAbsent(listName, ln -> new LinkedHashMap<>());
|
||||
|
||||
if (Integer.parseInt(kwType) == TskData.KeywordSearchQueryType.SUBSTRING.getType() || reg == null) { //literal, substring or exact
|
||||
/*
|
||||
* Substring, treated same as exact match.
|
||||
*/
|
||||
word = (reg != null) ? reg : word; // use the original search term if it is there
|
||||
addNonRegExpMatchToList(listMap, word, id);
|
||||
} else {
|
||||
addRegExpToList(listMap, reg, word, id);
|
||||
}
|
||||
} else {//single term
|
||||
if (Integer.parseInt(kwType) == TskData.KeywordSearchQueryType.SUBSTRING.getType() || reg == null) { //literal, substring or exact
|
||||
/*
|
||||
* Substring, treated same as exact match.
|
||||
*/
|
||||
word = (reg != null) ? reg : word; // use the original search term if it is there
|
||||
addNonRegExpMatchToList(literalMap, word, id);
|
||||
} else {
|
||||
addRegExpToList(regexMap, reg, word, id);
|
||||
}
|
||||
}
|
||||
}
|
||||
topLevelMap.putAll(listsMap);
|
||||
}
|
||||
|
||||
setChanged();
|
||||
notifyObservers();
|
||||
}
|
||||
|
||||
public void update() {
|
||||
// maps Artifact ID to map of attribute types to attribute values
|
||||
Map<Long, Map<Long, String>> artifactIds = new LinkedHashMap<>();
|
||||
|
||||
if (skCase == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
String queryStr = KEYWORD_HIT_ATTRIBUTES_QUERY;
|
||||
if (filteringDSObjId > 0) {
|
||||
queryStr += " AND blackboard_artifacts.data_source_obj_id = " + filteringDSObjId;
|
||||
}
|
||||
|
||||
try (CaseDbQuery dbQuery = skCase.executeQuery(queryStr)) {
|
||||
ResultSet resultSet = dbQuery.getResultSet();
|
||||
while (resultSet.next()) {
|
||||
long artifactObjId = resultSet.getLong("artifact_obj_id"); //NON-NLS
|
||||
long typeId = resultSet.getLong("attribute_type_id"); //NON-NLS
|
||||
String valueStr = resultSet.getString("value_text"); //NON-NLS
|
||||
|
||||
//get the map of attributes for this artifact
|
||||
Map<Long, String> attributesByTypeMap = artifactIds.computeIfAbsent(artifactObjId, ai -> new LinkedHashMap<>());
|
||||
if (StringUtils.isNotEmpty(valueStr)) {
|
||||
attributesByTypeMap.put(typeId, valueStr);
|
||||
} else {
|
||||
// Keyword Search Type is an int
|
||||
Long valueLong = resultSet.getLong("value_int32");
|
||||
attributesByTypeMap.put(typeId, valueLong.toString());
|
||||
}
|
||||
}
|
||||
} catch (TskCoreException | SQLException ex) {
|
||||
logger.log(Level.WARNING, "SQL Exception occurred: ", ex); //NON-NLS
|
||||
}
|
||||
|
||||
populateTreeMaps(artifactIds);
|
||||
}
|
||||
}
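    /*
     * Illustrative shape of topLevelMap after an update (hypothetical data):
     *
     *   "Single Literal Keyword Search"    -> {"password" -> {DEFAULT_INSTANCE_NAME -> {101, 102}}}
     *   "Single Regular Expression Search" -> {"\\d{3}-\\d{4}" -> {"555-1234" -> {103}}}
     *   "My Keyword List"                  -> {per-keyword maps as above}
     *
     * Literal and substring hits collapse onto DEFAULT_INSTANCE_NAME, while a
     * regexp keeps one instance entry per concrete term it matched.
     */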
|
||||
|
||||
@Override
|
||||
public <T> T accept(AutopsyItemVisitor<T> visitor) {
|
||||
return visitor.visit(this);
|
||||
}
|
||||
|
||||
// Created by CreateAutopsyNodeVisitor
|
||||
public class RootNode extends UpdatableCountTypeNode {
|
||||
|
||||
public RootNode() {
|
||||
super(Children.create(new ListFactory(), true),
|
||||
Lookups.singleton(KEYWORD_HITS),
|
||||
KEYWORD_HITS,
|
||||
filteringDSObjId,
|
||||
TSK_KEYWORD_HIT);
|
||||
|
||||
super.setName(NAME);
|
||||
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/keyword_hits.png"); //NON-NLS
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isLeafTypeNode() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
|
||||
return visitor.visit(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
@NbBundle.Messages({"KeywordHits.createSheet.name.name=Name",
|
||||
"KeywordHits.createSheet.name.displayName=Name",
|
||||
"KeywordHits.createSheet.name.desc=no description"})
|
||||
protected Sheet createSheet() {
|
||||
Sheet sheet = super.createSheet();
|
||||
Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
|
||||
if (sheetSet == null) {
|
||||
sheetSet = Sheet.createPropertiesSet();
|
||||
sheet.put(sheetSet);
|
||||
}
|
||||
|
||||
sheetSet.put(new NodeProperty<>(
|
||||
Bundle.KeywordHits_createSheet_name_name(),
|
||||
Bundle.KeywordHits_createSheet_name_displayName(),
|
||||
Bundle.KeywordHits_createSheet_name_desc(),
|
||||
getName()));
|
||||
|
||||
return sheet;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getItemType() {
|
||||
return getClass().getName();
|
||||
}
|
||||
}
|
||||
|
||||
private abstract class DetachableObserverChildFactory<X> extends ChildFactory.Detachable<X> implements Observer {
|
||||
|
||||
@Override
|
||||
protected void addNotify() {
|
||||
keywordResults.addObserver(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void finalize() throws Throwable {
|
||||
super.finalize();
|
||||
keywordResults.deleteObserver(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void update(Observable o, Object arg) {
|
||||
refresh(true);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates the list nodes
|
||||
*/
|
||||
private class ListFactory extends DetachableObserverChildFactory<String> {
|
||||
|
||||
private final PropertyChangeListener pcl = new PropertyChangeListener() {
|
||||
@Override
|
||||
public void propertyChange(PropertyChangeEvent evt) {
|
||||
String eventType = evt.getPropertyName();
|
||||
if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) {
|
||||
/**
|
||||
* Checking for a current case is a stop gap measure until a
|
||||
* different way of handling the closing of cases is worked
|
||||
* out. Currently, remote events may be received for a case
|
||||
* that is already closed.
|
||||
*/
|
||||
try {
|
||||
Case.getCurrentCaseThrows();
|
||||
/**
|
||||
* Even with the check above, it is still possible that
|
||||
* the case will be closed in a different thread before
|
||||
* this code executes. If that happens, it is possible
|
||||
* for the event to have a null oldValue.
|
||||
*/
|
||||
ModuleDataEvent eventData = (ModuleDataEvent) evt.getOldValue();
|
||||
if (null != eventData && eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.Type.TSK_KEYWORD_HIT.getTypeID()) {
|
||||
keywordResults.update();
|
||||
}
|
||||
} catch (NoCurrentCaseException notUsed) {
|
||||
// Case is closed, do nothing.
|
||||
}
|
||||
} else if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString())
|
||||
|| eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) {
|
||||
/**
|
||||
* Checking for a current case is a stop gap measure until a
|
||||
* different way of handling the closing of cases is worked
|
||||
* out. Currently, remote events may be received for a case
|
||||
* that is already closed.
|
||||
*/
|
||||
try {
|
||||
Case.getCurrentCaseThrows();
|
||||
keywordResults.update();
|
||||
} catch (NoCurrentCaseException notUsed) {
|
||||
// Case is closed, do nothing.
|
||||
}
|
||||
} else if (eventType.equals(Case.Events.CURRENT_CASE.toString())
|
||||
&& evt.getNewValue() == null) {
|
||||
/*
|
||||
* Case was closed. Remove listeners so that we don't get
|
||||
* called with a stale case handle
|
||||
*/
|
||||
removeNotify();
|
||||
skCase = null;
|
||||
}
|
||||
|
||||
}
|
||||
};
|
||||
|
||||
private final PropertyChangeListener weakPcl = WeakListeners.propertyChange(pcl, null);
|
||||
|
||||
@Override
|
||||
protected void addNotify() {
|
||||
IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, weakPcl);
|
||||
IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS_OF_INTEREST, weakPcl);
|
||||
Case.addEventTypeSubscriber(EnumSet.of(Case.Events.CURRENT_CASE), weakPcl);
|
||||
keywordResults.update();
|
||||
super.addNotify();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void finalize() throws Throwable {
|
||||
IngestManager.getInstance().removeIngestJobEventListener(weakPcl);
|
||||
IngestManager.getInstance().removeIngestModuleEventListener(weakPcl);
|
||||
Case.removeEventTypeSubscriber(EnumSet.of(Case.Events.CURRENT_CASE), weakPcl);
|
||||
super.finalize();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean createKeys(List<String> list) {
|
||||
list.addAll(keywordResults.getListNames());
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Node createNodeForKey(String key) {
|
||||
return new ListNode(key);
|
||||
}
|
||||
}
|
||||
|
||||
private abstract class KWHitsNodeBase extends DisplayableItemNode implements Observer {
|
||||
|
||||
private String displayName;
|
||||
|
||||
private KWHitsNodeBase(Children children, Lookup lookup, String displayName) {
|
||||
super(children, lookup);
|
||||
this.displayName = displayName;
|
||||
}
|
||||
|
||||
private KWHitsNodeBase(Children children) {
|
||||
super(children);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getItemType() {
|
||||
return getClass().getName();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void update(Observable o, Object arg) {
|
||||
updateDisplayName();
|
||||
}
|
||||
|
||||
final void updateDisplayName() {
|
||||
super.setDisplayName(displayName + " (" + countTotalDescendants() + ")");
|
||||
}
|
||||
|
||||
abstract int countTotalDescendants();
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents the keyword search lists (or the default groupings if no list
* was given).
|
||||
*/
|
||||
class ListNode extends KWHitsNodeBase {
|
||||
|
||||
private final String listName;
|
||||
|
||||
private ListNode(String listName) {
|
||||
super(Children.create(new TermFactory(listName), true), Lookups.singleton(listName), listName);
|
||||
super.setName(listName);
|
||||
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/keyword_hits.png"); //NON-NLS
|
||||
this.listName = listName;
|
||||
updateDisplayName();
|
||||
keywordResults.addObserver(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int countTotalDescendants() {
|
||||
int totalDescendants = 0;
|
||||
|
||||
for (String word : keywordResults.getKeywords(listName)) {
|
||||
for (String instance : keywordResults.getKeywordInstances(listName, word)) {
|
||||
Set<Long> ids = keywordResults.getArtifactIds(listName, word, instance);
|
||||
totalDescendants += ids.size();
|
||||
}
|
||||
}
|
||||
return totalDescendants;
|
||||
}
|
||||
|
||||
@Override
|
||||
@NbBundle.Messages({"KeywordHits.createSheet.listName.name=List Name",
|
||||
"KeywordHits.createSheet.listName.displayName=List Name",
|
||||
"KeywordHits.createSheet.listName.desc=no description",
|
||||
"KeywordHits.createSheet.numChildren.name=Number of Children",
|
||||
"KeywordHits.createSheet.numChildren.displayName=Number of Children",
|
||||
"KeywordHits.createSheet.numChildren.desc=no description"})
|
||||
protected Sheet createSheet() {
|
||||
Sheet sheet = super.createSheet();
|
||||
Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
|
||||
if (sheetSet == null) {
|
||||
sheetSet = Sheet.createPropertiesSet();
|
||||
sheet.put(sheetSet);
|
||||
}
|
||||
|
||||
sheetSet.put(new NodeProperty<>(
|
||||
Bundle.KeywordHits_createSheet_listName_name(),
|
||||
Bundle.KeywordHits_createSheet_listName_displayName(),
|
||||
Bundle.KeywordHits_createSheet_listName_desc(),
|
||||
listName));
|
||||
|
||||
sheetSet.put(new NodeProperty<>(
|
||||
Bundle.KeywordHits_createSheet_numChildren_name(),
|
||||
Bundle.KeywordHits_createSheet_numChildren_displayName(),
|
||||
Bundle.KeywordHits_createSheet_numChildren_desc(),
|
||||
keywordResults.getKeywords(listName).size()));
|
||||
|
||||
return sheet;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isLeafTypeNode() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
|
||||
return visitor.visit(this);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates the nodes that represent search terms
|
||||
*/
|
||||
private class TermFactory extends DetachableObserverChildFactory<String> {
|
||||
|
||||
private final String setName;
|
||||
|
||||
private TermFactory(String setName) {
|
||||
super();
|
||||
this.setName = setName;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean createKeys(List<String> list) {
|
||||
list.addAll(keywordResults.getKeywords(setName));
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Node createNodeForKey(String key) {
|
||||
return new TermNode(setName, key);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a ChildFactory object for the given set name and keyword.
|
||||
*
|
||||
* The type of ChildFactory we create is based on whether the node
|
||||
* represents a regular expression keyword search or not. For regular
|
||||
* expression keyword searches there will be an extra layer in the tree that
|
||||
* represents each of the individual terms found by the regular expression.
|
||||
* E.g., for an email regular expression search there will be a node in the
|
||||
* tree for every email address hit.
|
||||
*/
|
||||
ChildFactory<?> createChildFactory(String setName, String keyword) {
|
||||
if (isOnlyDefaultInstance(keywordResults.getKeywordInstances(setName, keyword))) {
|
||||
return new HitsFactory(setName, keyword, DEFAULT_INSTANCE_NAME);
|
||||
} else {
|
||||
return new RegExpInstancesFactory(setName, keyword);
|
||||
}
|
||||
}
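The branch above depends on isOnlyDefaultInstance(), which is defined elsewhere in KeywordHits and is not part of this hunk. A minimal sketch, assuming the helper simply checks whether the only recorded "instance" is the shared placeholder used for exact/substring matches (names and the use of List are illustrative):

import java.util.List;

// Sketch only, not part of this commit; the real helper may differ.
class DefaultInstanceCheck {
    static final String DEFAULT_INSTANCE_NAME = "DEFAULT_INSTANCE_NAME";

    // True when there is no per-term layer to show for this keyword.
    static boolean isOnlyDefaultInstance(List<String> instances) {
        return instances.size() == 1 && DEFAULT_INSTANCE_NAME.equals(instances.get(0));
    }

    public static void main(String[] args) {
        System.out.println(isOnlyDefaultInstance(List.of(DEFAULT_INSTANCE_NAME))); // true  -> HitsFactory
        System.out.println(isOnlyDefaultInstance(List.of("a@x.com", "b@y.org")));  // false -> RegExpInstancesFactory
    }
}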

/**
* Represents the search term or regexp that the user searched for
*/
class TermNode extends KWHitsNodeBase {

private final String setName;
private final String keyword;

private TermNode(String setName, String keyword) {
super(Children.create(createChildFactory(setName, keyword), true), Lookups.singleton(keyword), keyword);

/**
* We differentiate between the programmatic name and the display
* name. The programmatic name is used to create an association with
* an event bus and must be the same as the node name passed by our
* ChildFactory to its parent constructor. See the HitsFactory
* constructor for an example.
*/
super.setName(setName + "_" + keyword);
this.setName = setName;
this.keyword = keyword;
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/keyword_hits.png"); //NON-NLS
updateDisplayName();
keywordResults.addObserver(this);
}

@Override
int countTotalDescendants() {
return keywordResults.getKeywordInstances(setName, keyword).stream()
.mapToInt(instance -> keywordResults.getArtifactIds(setName, keyword, instance).size())
.sum();
}

@Override
public boolean isLeafTypeNode() {
// is this an exact/substring match (i.e. did we use the DEFAULT name)?
return isOnlyDefaultInstance(keywordResults.getKeywordInstances(setName, keyword));
}

@Override
public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
return visitor.visit(this);
}

@Override
@NbBundle.Messages({"KeywordHits.createSheet.filesWithHits.name=Files with Hits",
"KeywordHits.createSheet.filesWithHits.displayName=Files with Hits",
"KeywordHits.createSheet.filesWithHits.desc=no description"})
protected Sheet createSheet() {
Sheet sheet = super.createSheet();
Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
if (sheetSet == null) {
sheetSet = Sheet.createPropertiesSet();
sheet.put(sheetSet);
}
sheetSet.put(new NodeProperty<>(
Bundle.KeywordHits_createSheet_listName_name(),
Bundle.KeywordHits_createSheet_listName_displayName(),
Bundle.KeywordHits_createSheet_listName_desc(),
getDisplayName()));

sheetSet.put(new NodeProperty<>(
Bundle.KeywordHits_createSheet_filesWithHits_name(),
Bundle.KeywordHits_createSheet_filesWithHits_displayName(),
Bundle.KeywordHits_createSheet_filesWithHits_desc(),
countTotalDescendants()));

return sheet;
}
}

/**
* Creates the nodes for a given regexp that represent the specific terms
* that were found
*/
private class RegExpInstancesFactory extends DetachableObserverChildFactory<String> {

private final String keyword;
private final String setName;

private RegExpInstancesFactory(String setName, String keyword) {
super();
this.setName = setName;
this.keyword = keyword;
}

@Override
protected boolean createKeys(List<String> list) {
list.addAll(keywordResults.getKeywordInstances(setName, keyword));
return true;
}

@Override
protected Node createNodeForKey(String key) {
return new RegExpInstanceNode(setName, keyword, key);
}
}

/**
* Represents a specific term that was found from a regexp
*/
class RegExpInstanceNode extends KWHitsNodeBase implements SelectionResponder {

private final String setName;
private final String keyword;
private final String instance;

private RegExpInstanceNode(String setName, String keyword, String instance) {
super(Children.LEAF, Lookups.singleton(instance), instance);

/**
* We differentiate between the programmatic name and the display
* name. The programmatic name is used to create an association with
* an event bus and must be the same as the node name passed by our
* ChildFactory to its parent constructor. See the HitsFactory
* constructor for an example.
*/
super.setName(setName + "_" + keyword + "_" + instance);
this.setName = setName;
this.keyword = keyword;
this.instance = instance;
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/keyword_hits.png"); //NON-NLS
updateDisplayName();
keywordResults.addObserver(this);
}

@Override
public void respondSelection(DataResultTopComponent dataResultPanel) {
dataResultPanel.displayKeywordHits(new KeywordHitSearchParam(
filteringDSObjId > 0 ? filteringDSObjId : null,
setName, keyword, instance, TskData.KeywordSearchQueryType.REGEX));
}

@Override
int countTotalDescendants() {
return keywordResults.getArtifactIds(setName, keyword, instance).size();
}

@Override
public boolean isLeafTypeNode() {
return true;
}

@Override
public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
return visitor.visit(this);
}

@Override
protected Sheet createSheet() {
Sheet sheet = super.createSheet();
Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
if (sheetSet == null) {
sheetSet = Sheet.createPropertiesSet();
sheet.put(sheetSet);
}

sheetSet.put(new NodeProperty<>(
Bundle.KeywordHits_createSheet_listName_name(),
Bundle.KeywordHits_createSheet_listName_displayName(),
Bundle.KeywordHits_createSheet_listName_desc(),
getDisplayName()));

sheetSet.put(new NodeProperty<>(
Bundle.KeywordHits_createSheet_filesWithHits_name(),
Bundle.KeywordHits_createSheet_filesWithHits_displayName(),
Bundle.KeywordHits_createSheet_filesWithHits_desc(),
keywordResults.getArtifactIds(setName, keyword, instance).size()));

return sheet;
}

}

/**
* Create a blackboard node for the given Keyword Hit artifact
*
* @param art
*
* @return Node or null on error
*/
@NbBundle.Messages({"KeywordHits.createNodeForKey.modTime.name=ModifiedTime",
"KeywordHits.createNodeForKey.modTime.displayName=Modified Time",
"KeywordHits.createNodeForKey.modTime.desc=Modified Time",
"KeywordHits.createNodeForKey.accessTime.name=AccessTime",
"KeywordHits.createNodeForKey.accessTime.displayName=Access Time",
"KeywordHits.createNodeForKey.accessTime.desc=Access Time",
"KeywordHits.createNodeForKey.chgTime.name=ChangeTime",
"KeywordHits.createNodeForKey.chgTime.displayName=Change Time",
"KeywordHits.createNodeForKey.chgTime.desc=Change Time"})
private BlackboardArtifactNode createBlackboardArtifactNode(AnalysisResult art) {
if (skCase == null) {
return null;
}

BlackboardArtifactNode n = new BlackboardArtifactNode(art); //NON-NLS

// The associated file should be available through the Lookup that
// gets created when the BlackboardArtifactNode is constructed.
AbstractFile file = n.getLookup().lookup(AbstractFile.class);
if (file == null) {
try {
file = skCase.getAbstractFileById(art.getObjectID());
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "TskCoreException while constructing BlackboardArtifact Node from KeywordHitsKeywordChildren", ex); //NON-NLS
return n;
}
}
/*
* It is possible to get a keyword hit on artifacts generated for the
* underlying image in which case MAC times are not
* available/applicable/useful.
*/
if (file == null) {
return n;
}
n.addNodeProperty(new NodeProperty<>(
Bundle.KeywordHits_createNodeForKey_modTime_name(),
Bundle.KeywordHits_createNodeForKey_modTime_displayName(),
Bundle.KeywordHits_createNodeForKey_modTime_desc(),
TimeZoneUtils.getFormattedTime(file.getMtime())));
n.addNodeProperty(new NodeProperty<>(
Bundle.KeywordHits_createNodeForKey_accessTime_name(),
Bundle.KeywordHits_createNodeForKey_accessTime_displayName(),
Bundle.KeywordHits_createNodeForKey_accessTime_desc(),
TimeZoneUtils.getFormattedTime(file.getAtime())));
n.addNodeProperty(new NodeProperty<>(
Bundle.KeywordHits_createNodeForKey_chgTime_name(),
Bundle.KeywordHits_createNodeForKey_chgTime_displayName(),
Bundle.KeywordHits_createNodeForKey_chgTime_desc(),
TimeZoneUtils.getFormattedTime(file.getCtime())));
return n;
}

/**
* Creates nodes for individual files that had hits
*/
private class HitsFactory extends BaseChildFactory<AnalysisResult> implements Observer {

private final String keyword;
private final String setName;
private final String instance;
private final Map<Long, AnalysisResult> artifactHits = new HashMap<>();

private HitsFactory(String setName, String keyword, String instance) {
/**
* The node name passed to the parent constructor will consist of
* the set name, keyword and optionally the instance name (in the
* case of regular expression hits). This name must match the name
* set in the TermNode or RegExpInstanceNode constructors.
*/
super(setName + "_" + keyword + (DEFAULT_INSTANCE_NAME.equals(instance) ? "" : "_" + instance));
this.setName = setName;
this.keyword = keyword;
this.instance = instance;
}

@Override
protected List<AnalysisResult> makeKeys() {
if (skCase != null) {
keywordResults.getArtifactIds(setName, keyword, instance).forEach((id) -> {
try {
if (!artifactHits.containsKey(id)) {
AnalysisResult art = skCase.getBlackboard().getAnalysisResultById(id);
//Cache attributes while we are off the EDT.
//See JIRA-5969
art.getAttributes();
artifactHits.put(id, art);
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "TSK Exception occurred", ex); //NON-NLS
}
});

return new ArrayList<>(artifactHits.values());
}
return Collections.emptyList();
}

@Override
protected Node createNodeForKey(AnalysisResult art) {
return createBlackboardArtifactNode(art);
}

@Override
protected void onAdd() {
keywordResults.addObserver(this);
}

@Override
protected void onRemove() {
keywordResults.deleteObserver(this);
}

@Override
public void update(Observable o, Object arg) {
refresh(true);
}
}
}
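The name-matching comments in TermNode, RegExpInstanceNode and HitsFactory all describe one invariant: the string passed to super.setName(...) in the node must equal the name the factory hands to its BaseChildFactory parent, because that string keys the refresh events. A small standalone sketch of that invariant, with made-up set, keyword and instance values (not Autopsy code):

// Illustrative values only; the equality is what matters.
public class NodeNameInvariant {
    static final String DEFAULT_INSTANCE_NAME = "DEFAULT_INSTANCE_NAME";

    // Mirrors the name built in the HitsFactory constructor above.
    static String factoryName(String setName, String keyword, String instance) {
        return setName + "_" + keyword + (DEFAULT_INSTANCE_NAME.equals(instance) ? "" : "_" + instance);
    }

    public static void main(String[] args) {
        String setName = "Email Addresses";
        String keyword = "[a-z]+@[a-z]+";
        String instance = "alice@example.com";

        // Exact/substring hit: TermNode's programmatic name must match the factory name.
        String termNodeName = setName + "_" + keyword;
        System.out.println(termNodeName.equals(factoryName(setName, keyword, DEFAULT_INSTANCE_NAME))); // true

        // Regex hit: RegExpInstanceNode's programmatic name must match the factory name.
        String instanceNodeName = setName + "_" + keyword + "_" + instance;
        System.out.println(instanceNodeName.equals(factoryName(setName, keyword, instance))); // true
    }
}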
@ -1,99 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.datamodel;

import org.sleuthkit.autopsy.datamodel.AutopsyVisitableItem;
import org.sleuthkit.autopsy.datamodel.AutopsyItemVisitor;
import org.openide.util.NbBundle;
import org.sleuthkit.datamodel.SleuthkitCase;

/**
* Recent files node support NOTE: As of June '15 we do not display this in the
* tree. It can be added back when we have filtering in the results area.
*/
public class RecentFiles implements AutopsyVisitableItem {

SleuthkitCase skCase;

public enum RecentFilesFilter implements AutopsyVisitableItem {

AUT_0DAY_FILTER(0, "AUT_0DAY_FILTER", //NON-NLS
NbBundle.getMessage(RecentFiles.class, "RecentFiles.aut0DayFilter.displayName.text"), 0),
AUT_1DAY_FILTER(0, "AUT_1DAY_FILTER", //NON-NLS
NbBundle.getMessage(RecentFiles.class, "RecentFiles.aut1dayFilter.displayName.text"), 1),
AUT_2DAY_FILTER(0, "AUT_2DAY_FILTER", //NON-NLS
NbBundle.getMessage(RecentFiles.class, "RecentFiles.aut2dayFilter.displayName.text"), 2),
AUT_3DAY_FILTER(0, "AUT_3DAY_FILTER", //NON-NLS
NbBundle.getMessage(RecentFiles.class, "RecentFiles.aut3dayFilter.displayName.text"), 3),
AUT_4DAY_FILTER(0, "AUT_4DAY_FILTER", //NON-NLS
NbBundle.getMessage(RecentFiles.class, "RecentFiles.aut4dayFilter.displayName.text"), 4),
AUT_5DAY_FILTER(0, "AUT_5DAY_FILTER", //NON-NLS
NbBundle.getMessage(RecentFiles.class, "RecentFiles.aut5dayFilter.displayName.text"), 5),
AUT_6DAY_FILTER(0, "AUT_6DAY_FILTER", //NON-NLS
NbBundle.getMessage(RecentFiles.class, "RecentFiles.aut6dayFilter.displayName.text"), 6);

private int id;
private String name;
private String displayName;
private int durationDays;

private RecentFilesFilter(int id, String name, String displayName, int durationDays) {
this.id = id;
this.name = name;
this.displayName = displayName;
this.durationDays = durationDays;
}

public String getName() {
return this.name;
}

public int getId() {
return this.id;
}

public String getDisplayName() {
return this.displayName;
}

public int getDurationDays() {
return this.durationDays;
}

@Override
public <T> T accept(AutopsyItemVisitor<T> visitor) {
return visitor.visit(this);
}

}

public RecentFiles(SleuthkitCase skCase) {
this.skCase = skCase;
}

@Override
public <T> T accept(AutopsyItemVisitor<T> visitor) {
return visitor.visit(this);
}

public SleuthkitCase getSleuthkitCase() {
return this.skCase;
}

}
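RecentFiles and its RecentFilesFilter enum carry no node logic of their own; accept() hands the object to an AutopsyItemVisitor, and the visitor (see CreateAutopsyNodeVisitor further down) decides which node class to build for it. A minimal double-dispatch sketch with invented names, not the Autopsy API:

// Minimal visitor double-dispatch sketch; names are made up for illustration.
interface ItemVisitor<T> {
    T visit(RecentItem item);
}

class RecentItem {
    <T> T accept(ItemVisitor<T> visitor) {
        return visitor.visit(this); // the item only hands itself over
    }
}

public class VisitorSketch {
    public static void main(String[] args) {
        // The visitor, not the item, chooses what to construct for the tree.
        String label = new RecentItem().accept(item -> "RecentFilesNode for " + item);
        System.out.println(label);
    }
}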
@ -1,97 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.datamodel;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Calendar;
import java.util.List;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.openide.nodes.ChildFactory;
import org.openide.nodes.Node;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.SleuthkitCase.CaseDbQuery;
import org.sleuthkit.datamodel.TskCoreException;

/**
*
* @author dfickling
*/
class RecentFilesChildren extends ChildFactory<RecentFiles.RecentFilesFilter> {

private SleuthkitCase skCase;
private Calendar lastDay;
private final static Logger logger = Logger.getLogger(RecentFilesChildren.class.getName());

public RecentFilesChildren(SleuthkitCase skCase) {
this.skCase = skCase;
}

@Override
protected boolean createKeys(List<RecentFiles.RecentFilesFilter> list) {
list.addAll(Arrays.asList(RecentFiles.RecentFilesFilter.values()));
lastDay = Calendar.getInstance();
lastDay.setTimeInMillis(getLastTime() * 1000);
lastDay.set(Calendar.HOUR_OF_DAY, 0);
lastDay.set(Calendar.MINUTE, 0);
lastDay.set(Calendar.SECOND, 0);
lastDay.set(Calendar.MILLISECOND, 0);
return true;
}

@Override
protected Node createNodeForKey(RecentFiles.RecentFilesFilter key) {
return new RecentFilesFilterNode(skCase, key, lastDay);
}

private long getLastTime() {
String query = createMaxQuery("crtime"); //NON-NLS
long maxcr = runTimeQuery(query);
query = createMaxQuery("ctime"); //NON-NLS
long maxc = runTimeQuery(query);
query = createMaxQuery("mtime"); //NON-NLS
long maxm = runTimeQuery(query);
//query = createMaxQuery("atime");
//long maxa = runTimeQuery(query);
//return Math.max(maxcr, Math.max(maxc, Math.max(maxm, maxa)));
return Math.max(maxcr, Math.max(maxc, maxm));
}

//TODO add a generic query to SleuthkitCase
private String createMaxQuery(String attr) {
return "SELECT MAX(" + attr + ") FROM tsk_files WHERE " + attr + " < " + System.currentTimeMillis() / 1000; //NON-NLS
}

@SuppressWarnings("deprecation")
private long runTimeQuery(String query) {
long result = 0;

try (CaseDbQuery dbQuery = skCase.executeQuery(query)) {
ResultSet resultSet = dbQuery.getResultSet();
resultSet.next();
result = resultSet.getLong(1);
} catch (TskCoreException | SQLException ex) {
logger.log(Level.WARNING, "Couldn't get recent files results: ", ex); //NON-NLS
}

return result;
}
}
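createKeys() anchors every filter to case activity rather than to the wall clock: getLastTime() takes the newest crtime/ctime/mtime that is still below the current time, and the result is then truncated to midnight. For a concrete sense of the SQL involved, a sketch of the statement createMaxQuery() builds, using an illustrative timestamp:

// Sketch of the query string created above, with a fixed "now" for illustration.
public class MaxQuerySketch {
    public static void main(String[] args) {
        long nowSeconds = 1_700_000_000L; // illustrative epoch seconds, stands in for System.currentTimeMillis() / 1000
        String attr = "crtime";
        String query = "SELECT MAX(" + attr + ") FROM tsk_files WHERE " + attr + " < " + nowSeconds;
        System.out.println(query);
        // -> SELECT MAX(crtime) FROM tsk_files WHERE crtime < 1700000000
    }
}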
@ -1,145 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.datamodel;

import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import java.util.logging.Level;

import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.openide.nodes.AbstractNode;
import org.openide.nodes.ChildFactory;
import org.openide.nodes.Node;
import org.sleuthkit.autopsy.datamodel.RecentFiles.RecentFilesFilter;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.ContentVisitor;
import org.sleuthkit.datamodel.DerivedFile;
import org.sleuthkit.datamodel.Directory;
import org.sleuthkit.datamodel.File;
import org.sleuthkit.datamodel.LocalFile;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;

/**
*
* @author dfickling
*/
class RecentFilesFilterChildren extends ChildFactory<Content> {

private SleuthkitCase skCase;
private RecentFilesFilter filter;
private Calendar prevDay;
private final static Logger logger = Logger.getLogger(RecentFilesFilterChildren.class.getName());
//private final static int MAX_OBJECTS = 1000000;

RecentFilesFilterChildren(RecentFilesFilter filter, SleuthkitCase skCase, Calendar lastDay) {
this.skCase = skCase;
this.filter = filter;
this.prevDay = (Calendar) lastDay.clone();
prevDay.add(Calendar.DATE, -filter.getDurationDays());
}

@Override
protected boolean createKeys(List<Content> list) {
list.addAll(runQuery());
return true;
}

private String createQuery() {
Calendar prevDayQuery = (Calendar) prevDay.clone();
String query = "(dir_type = " + TskData.TSK_FS_NAME_TYPE_ENUM.REG.getValue() + ")" //NON-NLS
+ " AND (known IS NULL OR known != 1) AND ("; //NON-NLS
long lowerLimit = prevDayQuery.getTimeInMillis() / 1000;
prevDayQuery.add(Calendar.DATE, 1);
prevDayQuery.add(Calendar.MILLISECOND, -1);
long upperLimit = prevDayQuery.getTimeInMillis() / 1000;
query += "(crtime BETWEEN " + lowerLimit + " AND " + upperLimit + ") OR "; //NON-NLS
query += "(ctime BETWEEN " + lowerLimit + " AND " + upperLimit + ") OR "; //NON-NLS
//query += "(atime BETWEEN " + lowerLimit + " AND " + upperLimit + ") OR ";
query += "(mtime BETWEEN " + lowerLimit + " AND " + upperLimit + "))"; //NON-NLS
//query += " LIMIT " + MAX_OBJECTS;
return query;
}

private List<AbstractFile> runQuery() {
List<AbstractFile> ret = new ArrayList<AbstractFile>();
try {
List<AbstractFile> found = skCase.findAllFilesWhere(createQuery());
for (AbstractFile c : found) {
ret.add(c);
}

} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Couldn't get search results", ex); //NON-NLS
}
return ret;

}

/**
* Get children count without actually loading all nodes
*
* @return Count of files matching the filter query.
*/
long calculateItems() {
try {
return skCase.countFilesWhere(createQuery());
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error getting recent files search view count", ex); //NON-NLS
return 0;
}
}

@Override
protected Node createNodeForKey(Content key) {
return key.accept(new ContentVisitor.Default<AbstractNode>() {
@Override
public FileNode visit(File f) {
return new FileNode(f, false);
}

@Override
public DirectoryNode visit(Directory d) {
return new DirectoryNode(d);
}

@Override
public LocalFileNode visit(DerivedFile f) {
return new LocalFileNode(f);
}

@Override
public LocalFileNode visit(LocalFile f) {
return new LocalFileNode(f);
}

@Override
protected AbstractNode defaultVisit(Content di) {
throw new UnsupportedOperationException(
NbBundle.getMessage(this.getClass(),
"RecentFilesFilterChildren.exception.defaultVisit.msg",
di.toString()));
}
});
}
}
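The WHERE clause built by createQuery() brackets exactly one calendar day: lowerLimit is midnight of lastDay minus the filter's durationDays, and upperLimit is one millisecond short of the next midnight. A small sketch of that bounds arithmetic, using an assumed lastDay value:

import java.util.Calendar;
import java.util.TimeZone;

// Sketch of the window arithmetic in createQuery(), with an assumed lastDay.
public class DayWindowSketch {
    public static void main(String[] args) {
        Calendar lastDay = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
        lastDay.clear();
        lastDay.set(2015, Calendar.JUNE, 10, 0, 0, 0); // assumed newest-activity day, already at midnight

        int durationDays = 2; // e.g. the two-day filter
        Calendar prevDay = (Calendar) lastDay.clone();
        prevDay.add(Calendar.DATE, -durationDays);

        long lowerLimit = prevDay.getTimeInMillis() / 1000;   // midnight of the target day
        prevDay.add(Calendar.DATE, 1);
        prevDay.add(Calendar.MILLISECOND, -1);
        long upperLimit = prevDay.getTimeInMillis() / 1000;   // 23:59:59 of the same day

        System.out.println("crtime BETWEEN " + lowerLimit + " AND " + upperLimit);
    }
}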
@ -1,94 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2014 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.datamodel;

import java.util.Calendar;
import java.util.Locale;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.openide.nodes.Children;
import org.openide.nodes.Sheet;
import org.openide.util.lookup.Lookups;
import org.sleuthkit.autopsy.datamodel.RecentFiles.RecentFilesFilter;
import org.sleuthkit.datamodel.SleuthkitCase;

/**
* Node for recent files filter
*/
public class RecentFilesFilterNode extends DisplayableItemNode {

SleuthkitCase skCase;
RecentFilesFilter filter;
private final static Logger logger = Logger.getLogger(RecentFilesFilterNode.class.getName());

RecentFilesFilterNode(SleuthkitCase skCase, RecentFilesFilter filter, Calendar lastDay) {
super(Children.create(new RecentFilesFilterChildren(filter, skCase, lastDay), true), Lookups.singleton(filter.getDisplayName()));
super.setName(filter.getName());
this.skCase = skCase;
this.filter = filter;
Calendar prevDay = (Calendar) lastDay.clone();
prevDay.add(Calendar.DATE, -filter.getDurationDays());
String tooltip = prevDay.getDisplayName(Calendar.MONTH, Calendar.LONG, Locale.ENGLISH) + " "
+ prevDay.get(Calendar.DATE) + ", "
+ prevDay.get(Calendar.YEAR);
this.setShortDescription(tooltip);
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/recent_files.png"); //NON-NLS

//get count of children without preloading all children nodes
final long count = new RecentFilesFilterChildren(filter, skCase, lastDay).calculateItems();
super.setDisplayName(filter.getDisplayName() + " (" + count + ")");
}

@Override
public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
return visitor.visit(this);
}

@Override
protected Sheet createSheet() {
Sheet sheet = super.createSheet();
Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
if (sheetSet == null) {
sheetSet = Sheet.createPropertiesSet();
sheet.put(sheetSet);
}

sheetSet.put(new NodeProperty<>(
NbBundle.getMessage(this.getClass(), "RecentFilesFilterNode.createSheet.filterType.name"),
NbBundle.getMessage(this.getClass(), "RecentFilesFilterNode.createSheet.filterType.displayName"),
NbBundle.getMessage(this.getClass(), "RecentFilesFilterNode.createSheet.filterType.desc"),
filter.getDisplayName()));

return sheet;
}

@Override
public boolean isLeafTypeNode() {
return true;
}

@Override
public String getItemType() {
if (filter == null) {
return getClass().getName();
} else {
return getClass().getName() + filter.getName();
}
}
}
@ -1,71 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2014 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.datamodel;

import org.openide.nodes.Children;
import org.openide.nodes.Sheet;
import org.openide.util.NbBundle;
import org.openide.util.lookup.Lookups;
import org.sleuthkit.datamodel.SleuthkitCase;

/**
* Node for recent files
*/
public class RecentFilesNode extends DisplayableItemNode {

private static final String NAME = NbBundle.getMessage(RecentFilesNode.class, "RecentFilesNode.name.text");

RecentFilesNode(SleuthkitCase skCase) {
super(Children.create(new RecentFilesChildren(skCase), true), Lookups.singleton(NAME));
super.setName(NAME);
super.setDisplayName(NAME);
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/recent_files.png"); //NON-NLS
}

@Override
public boolean isLeafTypeNode() {
return false;
}

@Override
public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
return visitor.visit(this);
}

@Override
protected Sheet createSheet() {
Sheet sheet = super.createSheet();
Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
if (sheetSet == null) {
sheetSet = Sheet.createPropertiesSet();
sheet.put(sheetSet);
}

sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "RecentFilesNode.createSheet.name.name"),
NbBundle.getMessage(this.getClass(), "RecentFilesNode.createSheet.name.displayName"),
NbBundle.getMessage(this.getClass(), "RecentFilesNode.createSheet.name.desc"),
NAME));
return sheet;
}

@Override
public String getItemType() {
return getClass().getName();
}
}
@ -82,32 +82,6 @@ public class RootContentChildren extends Children.Keys<Object> {
* Set Hits, etc.).
*/
static class CreateAutopsyNodeVisitor extends AutopsyItemVisitor.Default<AbstractNode> {

@Override
public AbstractNode visit(RecentFiles rf) {
return new RecentFilesNode(rf.getSleuthkitCase());
}

@Override
public AbstractNode visit(KeywordHits kh) {
return kh.new RootNode();
}

@Override
public AbstractNode visit(HashsetHits hh) {
return hh.new RootNode();
}

@Override
public AbstractNode visit(InterestingHits ih) {
return ih.new RootNode();
}

@Override
public AbstractNode visit(EmailExtracted ee) {
return ee.new RootNode();
}

@Override
public AbstractNode visit(Tags tagsNodeKey) {
return tagsNodeKey.new RootNode(tagsNodeKey.filteringDataSourceObjId());
@ -50,6 +50,7 @@ import javax.swing.event.PopupMenuEvent;
import javax.swing.event.PopupMenuListener;
import javax.swing.tree.TreeSelectionModel;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.openide.explorer.ExplorerManager;
import org.openide.explorer.ExplorerUtils;
import org.openide.explorer.view.BeanTreeView;
@ -77,9 +78,7 @@ import org.sleuthkit.autopsy.coreutils.ModuleSettings;
import org.sleuthkit.autopsy.datamodel.AnalysisResults;
import org.sleuthkit.autopsy.datamodel.BlackboardArtifactNode;
import org.sleuthkit.autopsy.datamodel.DisplayableItemNode;
import org.sleuthkit.autopsy.datamodel.EmailExtracted;
import org.sleuthkit.autopsy.datamodel.EmptyNode;
import org.sleuthkit.autopsy.datamodel.KeywordHits;
import org.sleuthkit.autopsy.datamodel.AutopsyTreeChildFactory;
import org.sleuthkit.autopsy.datamodel.DataArtifacts;
import org.sleuthkit.autopsy.datamodel.OsAccounts;
@ -90,6 +89,9 @@ import org.sleuthkit.autopsy.datamodel.accounts.Accounts;
import org.sleuthkit.autopsy.corecomponents.SelectionResponder;
import org.sleuthkit.autopsy.datamodel.CreditCards;
import org.sleuthkit.autopsy.datamodel.accounts.BINRange;
import org.sleuthkit.autopsy.mainui.datamodel.EmailsDAO;
import org.sleuthkit.autopsy.mainui.nodes.AnalysisResultTypeFactory;
import org.sleuthkit.autopsy.mainui.nodes.AnalysisResultTypeFactory.KeywordSetFactory;
import org.sleuthkit.autopsy.mainui.nodes.ChildNodeSelectionInfo.BlackboardArtifactNodeSelectionInfo;
import org.sleuthkit.autopsy.mainui.nodes.TreeNode;
import org.sleuthkit.autopsy.mainui.nodes.ViewsTypeFactory.MimeParentNode;
@ -1464,11 +1466,7 @@ public final class DirectoryTreeTopComponent extends TopComponent implements Dat
}
}
if (listName == null) {
if (regex == null) { //using same labels used for creation
listName = NbBundle.getMessage(KeywordHits.class, "KeywordHits.simpleLiteralSearch.text");
} else {
listName = NbBundle.getMessage(KeywordHits.class, "KeywordHits.singleRegexSearch.text");
}
listName = NbBundle.getMessage(KeywordSetFactory.class, "AnalysisResultTypeFactory_adHocName");
}
Node listNode = keywordRootChilds.findChild(listName);
if (listNode == null) {
@ -1556,22 +1554,22 @@
private Node getEmailNode(Children typesChildren, BlackboardArtifact art) {
Node emailMsgRootNode = typesChildren.findChild(art.getArtifactTypeName());
Children emailMsgRootChilds = emailMsgRootNode.getChildren();
Map<String, String> parsedPath = null;
Pair<String, String> parsedPath = null;
try {
List<BlackboardAttribute> attributes = art.getAttributes();
for (BlackboardAttribute att : attributes) {
int typeId = att.getAttributeType().getTypeID();
if (typeId == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH.getTypeID()) {
parsedPath = EmailExtracted.parsePath(att.getValueString());
parsedPath = EmailsDAO.getPathAccountFolder(att.getValueString());
break;
}
}
if (parsedPath == null) {
return null;
}
Node defaultNode = emailMsgRootChilds.findChild(parsedPath.get(NbBundle.getMessage(EmailExtracted.class, "EmailExtracted.defaultAcct.text")));
Node defaultNode = emailMsgRootChilds.findChild(parsedPath.getLeft());
Children defaultChildren = defaultNode.getChildren();
return defaultChildren.findChild(parsedPath.get(NbBundle.getMessage(EmailExtracted.class, "EmailExtracted.defaultFolder.text")));
return defaultChildren.findChild(parsedPath.getRight());
} catch (TskCoreException ex) {
LOGGER.log(Level.WARNING, "Error retrieving attributes", ex); //NON-NLS
return null;

@ -141,7 +141,7 @@ public class EmailsDAO extends AbstractDAO {
*
* @return The pair of the account and folder or null if undetermined.
*/
private static Pair<String, String> getPathAccountFolder(String pathVal) {
public static Pair<String, String> getPathAccountFolder(String pathVal) {
String[] pieces = pathVal.split(PATH_DELIMITER);
return pieces.length < 4
? Pair.of("", "")

@ -253,7 +253,7 @@ public class AnalysisResultTypeFactory extends TreeChildFactory<AnalysisResultSe
@Messages({
"AnalysisResultTypeFactory_adHocName=Ad Hoc Results"
})
static class KeywordSetFactory extends TreeSetFactory {
public static class KeywordSetFactory extends TreeSetFactory {

public KeywordSetFactory(Long dataSourceId) {
super(BlackboardArtifact.Type.TSK_KEYWORD_HIT, dataSourceId, Bundle.AnalysisResultTypeFactory_adHocName());