commit 0420a12c4c

Merge remote-tracking branch 'upstream/new_table_load' into 8121_dsTreeDAO

# Conflicts:
#	Core/src/org/sleuthkit/autopsy/mainui/datamodel/Bundle.properties-MERGED
#	Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java
#	Core/test/qa-functional/src/org/sleuthkit/autopsy/mainui/datamodel/TableSearchTest.java
@@ -19,8 +19,9 @@
package org.sleuthkit.autopsy.datamodel;

import org.openide.nodes.Children;
import org.openide.nodes.Node;
import org.openide.util.NbBundle;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.autopsy.mainui.nodes.AnalysisResultTypeFactory;

/**
 * Analysis Results node support.
@@ -41,7 +42,14 @@ public class AnalysisResults implements AutopsyVisitableItem {
    /**
     * Parent node of all analysis results.
     */
    static class RootNode extends Artifacts.BaseArtifactNode {
    public static class RootNode extends Artifacts.BaseArtifactNode {

        private static Children getChildren(long filteringDSObjId) {
            return Children.create(
                    new AnalysisResultTypeFactory(filteringDSObjId > 0 ? filteringDSObjId : null), true);
        }

        private final long filteringDSObjId;

        /**
         * Main constructor.
@@ -52,10 +60,15 @@ public class AnalysisResults implements AutopsyVisitableItem {
         * equal to 0.
         */
        RootNode(long filteringDSObjId) {
            super(Children.create(new Artifacts.TypeFactory(BlackboardArtifact.Category.ANALYSIS_RESULT, filteringDSObjId), true),
            super(getChildren(filteringDSObjId),
                    "org/sleuthkit/autopsy/images/analysis_result.png",
                    AnalysisResults.getName(),
                    AnalysisResults.getName());
            this.filteringDSObjId = filteringDSObjId;
        }

        public Node clone() {
            return new AnalysisResults.RootNode(this.filteringDSObjId);
        }
    }

@@ -18,9 +18,13 @@
 */
package org.sleuthkit.autopsy.datamodel;

import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
import org.openide.nodes.Children;
import org.openide.nodes.Node;
import org.openide.util.NbBundle;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.mainui.nodes.DataArtifactTypeFactory;

/**
 * Analysis Results node support.
@@ -29,6 +33,8 @@ import org.sleuthkit.datamodel.BlackboardArtifact;
    "DataArtifacts_name=Data Artifacts",})
public class DataArtifacts implements AutopsyVisitableItem {

    private static final Logger logger = Logger.getLogger(DataArtifacts.class.getName());

    /**
     * Returns the name of this node that is the key in the children object.
     *
@@ -41,7 +47,13 @@ public class DataArtifacts implements AutopsyVisitableItem {
    /**
     * Parent node of all data artifacts.
     */
    static class RootNode extends Artifacts.BaseArtifactNode {
    public static class RootNode extends Artifacts.BaseArtifactNode {

        private static Children getChildren(long filteringDSObjId) {
            return Children.create(
                    new DataArtifactTypeFactory(filteringDSObjId > 0 ? filteringDSObjId : null), true);
        }
        private final long filteringDSObjId;

        /**
         * Main constructor.
@@ -52,10 +64,15 @@ public class DataArtifacts implements AutopsyVisitableItem {
         * equal to 0.
         */
        RootNode(long filteringDSObjId) {
            super(Children.create(new Artifacts.TypeFactory(BlackboardArtifact.Category.DATA_ARTIFACT, filteringDSObjId), true),
            super(getChildren(filteringDSObjId),
                    "org/sleuthkit/autopsy/images/extracted_content.png",
                    DataArtifacts.getName(),
                    DataArtifacts.getName());
            this.filteringDSObjId = filteringDSObjId;
        }

        public Node clone() {
            return new RootNode(this.filteringDSObjId);
        }
    }

@@ -18,12 +18,14 @@
 */
package org.sleuthkit.autopsy.datamodel.utils;

import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;

/**
 * Utility methods for handling icons
 */
public final class IconsUtil {

    private IconsUtil() {

    }
@@ -133,6 +135,13 @@ public final class IconsUtil {
            imageFile = "previously-unseen.png"; //NON-NLS
        } else if (typeID == ARTIFACT_TYPE.TSK_PREVIOUSLY_NOTABLE.getTypeID()) {
            imageFile = "red-circle-exclamation.png"; //NON-NLS
        } else if (typeID == BlackboardArtifact.Type.TSK_HASHSET_HIT.getTypeID()) {
            imageFile = "hashset_hits.png";
        } else if (typeID == BlackboardArtifact.Type.TSK_KEYWORD_HIT.getTypeID()) {
            imageFile = "keyword_hits.png";
        } else if (typeID == BlackboardArtifact.Type.TSK_INTERESTING_ARTIFACT_HIT.getTypeID()
                || typeID == BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT.getTypeID()) {
            imageFile = "interesting_item.png";
        } else {
            imageFile = "artifact-icon.png"; //NON-NLS
        }

@@ -25,9 +25,12 @@ import org.openide.nodes.Children;
import org.sleuthkit.autopsy.datamodel.DirectoryNode;
import org.openide.nodes.FilterNode;
import org.openide.nodes.Node;
import org.openide.util.Lookup;
import org.sleuthkit.autopsy.datamodel.AbstractAbstractFileNode;
import org.sleuthkit.autopsy.datamodel.AbstractContentNode;
import org.sleuthkit.autopsy.datamodel.AnalysisResults;
import org.sleuthkit.autopsy.datamodel.BlackboardArtifactNode;
import org.sleuthkit.autopsy.datamodel.DataArtifacts;
import org.sleuthkit.autopsy.datamodel.DisplayableItemNode;
import org.sleuthkit.autopsy.datamodel.DisplayableItemNodeVisitor;
import org.sleuthkit.autopsy.datamodel.FileNode;
@@ -38,6 +41,7 @@ import org.sleuthkit.autopsy.datamodel.LocalDirectoryNode;
import org.sleuthkit.autopsy.datamodel.SlackFileNode;
import org.sleuthkit.autopsy.datamodel.VirtualDirectoryNode;
import org.sleuthkit.autopsy.datamodel.VolumeNode;
import org.sleuthkit.autopsy.mainui.nodes.TreeNode;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.Content;
@@ -83,7 +87,13 @@ class DirectoryTreeFilterChildren extends FilterNode.Children {
     */
    @Override
    protected Node[] createNodes(Node origNode) {
        if (origNode == null || !(origNode instanceof DisplayableItemNode)) {
        if (origNode instanceof DataArtifacts.RootNode) {
            Node cloned = ((DataArtifacts.RootNode) origNode).clone();
            return new Node[]{cloned};
        } else if (origNode instanceof AnalysisResults.RootNode) {
            Node cloned = ((AnalysisResults.RootNode) origNode).clone();
            return new Node[]{cloned};
        } else if (origNode == null || !(origNode instanceof DisplayableItemNode)) {
            return new Node[]{};
        }

@@ -119,7 +129,7 @@ class DirectoryTreeFilterChildren extends FilterNode.Children {
                    && !((Directory) c).getName().equals(".."))) {
                ret = false;
                break;
            } else if(AbstractContentNode.contentHasVisibleContentChildren(c)){
            } else if (AbstractContentNode.contentHasVisibleContentChildren(c)) {
                //fie has children, such as derived files
                ret = false;
                break;
@@ -202,7 +212,7 @@ class DirectoryTreeFilterChildren extends FilterNode.Children {
        if ((childContent instanceof AbstractFile) && ((AbstractFile) childContent).isDir()) {
            return false;
        } else {
            if(AbstractContentNode.contentHasVisibleContentChildren(childContent)){
            if (AbstractContentNode.contentHasVisibleContentChildren(childContent)) {
                return false;
            }
        }
@@ -253,8 +263,8 @@ class DirectoryTreeFilterChildren extends FilterNode.Children {
        @Override
        public Boolean visit(BlackboardArtifactNode bbafn) {
            // Only show Message arttifacts with children
            if ( (bbafn.getArtifact().getArtifactTypeID() == ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID()) ||
                    (bbafn.getArtifact().getArtifactTypeID() == ARTIFACT_TYPE.TSK_MESSAGE.getTypeID()) ) {
            if ((bbafn.getArtifact().getArtifactTypeID() == ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID())
                    || (bbafn.getArtifact().getArtifactTypeID() == ARTIFACT_TYPE.TSK_MESSAGE.getTypeID())) {
                return bbafn.hasContentChildren();
            }

@@ -303,7 +313,6 @@ class DirectoryTreeFilterChildren extends FilterNode.Children {
            //return vdn.hasContentChildren();
        }


        @Override
        public Boolean visit(LocalDirectoryNode ldn) {
            return true;
@@ -318,8 +327,8 @@ class DirectoryTreeFilterChildren extends FilterNode.Children {
        public Boolean visit(BlackboardArtifactNode bbafn) {

            // Only show Message arttifacts with children
            if ( (bbafn.getArtifact().getArtifactTypeID() == ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID()) ||
                    (bbafn.getArtifact().getArtifactTypeID() == ARTIFACT_TYPE.TSK_MESSAGE.getTypeID()) ) {
            if ((bbafn.getArtifact().getArtifactTypeID() == ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID())
                    || (bbafn.getArtifact().getArtifactTypeID() == ARTIFACT_TYPE.TSK_MESSAGE.getTypeID())) {
                return bbafn.hasContentChildren();
            }

@@ -866,10 +866,17 @@ public final class DirectoryTreeTopComponent extends TopComponent implements Dat
                try {
                    Node treeNode = DirectoryTreeTopComponent.this.getSelectedNode();
                    if (treeNode != null) {
                        Node originNode = ((DirectoryTreeFilterNode) treeNode).getOriginal();

                        Node originNode;
                        if (treeNode instanceof DirectoryTreeFilterNode) {
                            originNode = ((DirectoryTreeFilterNode) treeNode).getOriginal();
                        } else {
                            originNode = treeNode;
                        }

                        //set node, wrap in filter node first to filter out children
                        Node drfn = new DataResultFilterNode(originNode, DirectoryTreeTopComponent.this.em);
                        if(originNode instanceof SelectionResponder) {
                        if (originNode instanceof SelectionResponder) {
                            ((SelectionResponder) originNode).respondSelection(dataResult);
                        } else if (FileTypesByMimeType.isEmptyMimeTypeNode(originNode)) {
                            //Special case for when File Type Identification has not yet been run and

@@ -21,13 +21,25 @@ package org.sleuthkit.autopsy.mainui.datamodel;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.beans.PropertyChangeEvent;
import java.sql.SQLException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.function.BiFunction;
import java.util.logging.Level;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO;
import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.AnalysisResult;
@@ -50,6 +62,8 @@ import org.sleuthkit.datamodel.VolumeSystem;
 */
public class AnalysisResultDAO extends BlackboardArtifactDAO {

    private static Logger logger = Logger.getLogger(AnalysisResultDAO.class.getName());

    private static AnalysisResultDAO instance = null;

    @NbBundle.Messages({
@@ -106,30 +120,34 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO {
        return instance;
    }

    /**
     * @return The set of types that are not shown in the tree.
     */
    public static Set<BlackboardArtifact.Type> getIgnoredTreeTypes() {
        return BlackboardArtifactDAO.getIgnoredTreeTypes();
    }

    // TODO We can probably combine all the caches at some point
    private final Cache<SearchParams<AnalysisResultSearchParam>, AnalysisResultTableSearchResultsDTO> analysisResultCache = CacheBuilder.newBuilder().maximumSize(1000).build();
    private final Cache<SearchParams<BlackboardArtifactSearchParam>, AnalysisResultTableSearchResultsDTO> analysisResultCache = CacheBuilder.newBuilder().maximumSize(1000).build();
    private final Cache<SearchParams<HashHitSearchParam>, AnalysisResultTableSearchResultsDTO> hashHitCache = CacheBuilder.newBuilder().maximumSize(1000).build();
    private final Cache<SearchParams<KeywordHitSearchParam>, AnalysisResultTableSearchResultsDTO> keywordHitCache = CacheBuilder.newBuilder().maximumSize(1000).build();

    private AnalysisResultTableSearchResultsDTO fetchAnalysisResultsForTable(SearchParams<AnalysisResultSearchParam> cacheKey) throws NoCurrentCaseException, TskCoreException {
    private AnalysisResultTableSearchResultsDTO fetchAnalysisResultsForTable(SearchParams<BlackboardArtifactSearchParam> cacheKey) throws NoCurrentCaseException, TskCoreException {

        SleuthkitCase skCase = getCase();
        Blackboard blackboard = skCase.getBlackboard();

        Long dataSourceId = cacheKey.getParamData().getDataSourceId();
        BlackboardArtifact.Type artType = cacheKey.getParamData().getArtifactType();

        // get analysis results
        List<BlackboardArtifact> arts = new ArrayList<>();
        if (dataSourceId != null) {
            arts.addAll(blackboard.getAnalysisResultsByType(artType.getTypeID(), dataSourceId));
        } else {
            arts.addAll(blackboard.getAnalysisResultsByType(artType.getTypeID()));
        }
        String pagedWhereClause = getWhereClause(cacheKey);
        arts.addAll(blackboard.getAnalysisResultsWhere(pagedWhereClause));
        blackboard.loadBlackboardAttributes(arts);

        List<BlackboardArtifact> pagedArtifacts = getPaged(arts, cacheKey);
        TableData tableData = createTableData(artType, pagedArtifacts);
        return new AnalysisResultTableSearchResultsDTO(artType, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), arts.size());
        // Get total number of results
        long totalResultsCount = getTotalResultsCount(cacheKey, arts.size());

        TableData tableData = createTableData(artType, arts);
        return new AnalysisResultTableSearchResultsDTO(artType, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), totalResultsCount);
    }

    private AnalysisResultTableSearchResultsDTO fetchSetNameHitsForTable(SearchParams<? extends AnalysisResultSetSearchParam> cacheKey) throws NoCurrentCaseException, TskCoreException {
@@ -140,26 +158,28 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO {
        Long dataSourceId = cacheKey.getParamData().getDataSourceId();
        BlackboardArtifact.Type artType = cacheKey.getParamData().getArtifactType();

        // Get all hash set hits
        List<AnalysisResult> allHashHits;
        // We currently can't make a query on the set name field because need to use a prepared statement
        String originalWhereClause = " artifacts.artifact_type_id = " + artType.getTypeID() + " ";
        if (dataSourceId != null) {
            allHashHits = blackboard.getAnalysisResultsByType(artType.getTypeID(), dataSourceId);
        } else {
            allHashHits = blackboard.getAnalysisResultsByType(artType.getTypeID());
            originalWhereClause += " AND artifacts.data_source_obj_id = " + dataSourceId + " ";
        }

        List<BlackboardArtifact> allHashHits = new ArrayList<>();
        allHashHits.addAll(blackboard.getAnalysisResultsWhere(originalWhereClause));
        blackboard.loadBlackboardAttributes(allHashHits);

        // Filter for the selected set
        List<BlackboardArtifact> arts = new ArrayList<>();
        for (AnalysisResult art : allHashHits) {
        List<BlackboardArtifact> hashHits = new ArrayList<>();
        for (BlackboardArtifact art : allHashHits) {
            BlackboardAttribute setNameAttr = art.getAttribute(BlackboardAttribute.Type.TSK_SET_NAME);
            if ((setNameAttr != null) && cacheKey.getParamData().getSetName().equals(setNameAttr.getValueString())) {
                arts.add(art);
                hashHits.add(art);
            }
        }

        List<BlackboardArtifact> pagedArtifacts = getPaged(arts, cacheKey);
        List<BlackboardArtifact> pagedArtifacts = getPaged(hashHits, cacheKey);
        TableData tableData = createTableData(artType, pagedArtifacts);
        return new AnalysisResultTableSearchResultsDTO(artType, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), arts.size());
        return new AnalysisResultTableSearchResultsDTO(artType, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), hashHits.size());
    }

    @Override
@@ -237,7 +257,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO {
                    + "Received artifact type: {0}; data source id: {1}", artType, artifactKey.getDataSourceId() == null ? "<null>" : artifactKey.getDataSourceId()));
        }

        SearchParams<AnalysisResultSearchParam> searchParams = new SearchParams<>(artifactKey, startItem, maxCount);
        SearchParams<BlackboardArtifactSearchParam> searchParams = new SearchParams<>(artifactKey, startItem, maxCount);
        if (hardRefresh) {
            analysisResultCache.invalidate(searchParams);
        }
@@ -264,6 +284,8 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO {
        return hashHitCache.get(searchParams, () -> fetchSetNameHitsForTable(searchParams));
    }

    // TODO - JIRA-8117
    // This needs to use more than just the set name
    public AnalysisResultTableSearchResultsDTO getKeywordHitsForTable(KeywordHitSearchParam artifactKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException {
        if (artifactKey.getDataSourceId() != null && artifactKey.getDataSourceId() < 0) {
            throw new IllegalArgumentException(MessageFormat.format("Illegal data. "
@@ -291,6 +313,145 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO {
        keywordHitCache.invalidateAll();
    }

    /**
     * Returns a search results dto containing rows of counts data.
     *
     * @param dataSourceId The data source object id for which the results
     *                     should be filtered or null if no data source
     *                     filtering.
     *
     * @return The results where rows are row of AnalysisResultSearchParam.
     *
     * @throws ExecutionException
     */
    public TreeResultsDTO<AnalysisResultSearchParam> getAnalysisResultCounts(Long dataSourceId) throws ExecutionException {
        try {
            // get row dto's sorted by display name
            Map<BlackboardArtifact.Type, Long> typeCounts = getCounts(BlackboardArtifact.Category.ANALYSIS_RESULT, dataSourceId);
            List<TreeResultsDTO.TreeItemDTO<AnalysisResultSearchParam>> treeItemRows = typeCounts.entrySet().stream()
                    .map(entry -> {
                        return new TreeResultsDTO.TreeItemDTO<>(
                                BlackboardArtifact.Category.ANALYSIS_RESULT.name(),
                                new AnalysisResultSearchParam(entry.getKey(), dataSourceId),
                                entry.getKey().getTypeID(),
                                entry.getKey().getDisplayName(),
                                entry.getValue());
                    })
                    .sorted(Comparator.comparing(countRow -> countRow.getDisplayName()))
                    .collect(Collectors.toList());

            // return results
            return new TreeResultsDTO<>(treeItemRows);

        } catch (NoCurrentCaseException | TskCoreException ex) {
            throw new ExecutionException("An error occurred while fetching analysis result counts.", ex);
        }
    }

// GVDTODO code to use in a future PR
|
||||
// /**
|
||||
// *
|
||||
// * @param type The artifact type to filter on.
|
||||
// * @param setNameAttr The blackboard attribute denoting the set name.
|
||||
// * @param dataSourceId The data source object id for which the results
|
||||
// * should be filtered or null if no data source
|
||||
// * filtering.
|
||||
// *
|
||||
// * @return A mapping of set names to their counts.
|
||||
// *
|
||||
// * @throws IllegalArgumentException
|
||||
// * @throws ExecutionException
|
||||
// */
|
||||
// Map<String, Long> getSetCountsMap(BlackboardArtifact.Type type, BlackboardAttribute.Type setNameAttr, Long dataSourceId) throws IllegalArgumentException, ExecutionException {
|
||||
// if (dataSourceId != null && dataSourceId <= 0) {
|
||||
// throw new IllegalArgumentException("Expected data source id to be > 0");
|
||||
// }
|
||||
//
|
||||
// try {
|
||||
// // get artifact types and counts
|
||||
// SleuthkitCase skCase = getCase();
|
||||
// String query = " set_name, COUNT(*) AS count \n"
|
||||
// + "FROM ( \n"
|
||||
// + " SELECT art.artifact_id, \n"
|
||||
// + " (SELECT value_text \n"
|
||||
// + " FROM blackboard_attributes attr \n"
|
||||
// + " WHERE attr.artifact_id = art.artifact_id AND attr.attribute_type_id = " + setNameAttr.getTypeID() + " LIMIT 1) AS set_name \n"
|
||||
// + " FROM blackboard_artifacts art \n"
|
||||
// + " WHERE art.artifact_type_id = " + type.getTypeID() + " \n"
|
||||
// + ((dataSourceId == null) ? "" : " AND art.data_source_obj_id = " + dataSourceId + " \n")
|
||||
// + ") \n"
|
||||
// + "GROUP BY set_name";
|
||||
//
|
||||
// Map<String, Long> setCounts = new HashMap<>();
|
||||
// skCase.getCaseDbAccessManager().select(query, (resultSet) -> {
|
||||
// try {
|
||||
// while (resultSet.next()) {
|
||||
// String setName = resultSet.getString("set_name");
|
||||
// long count = resultSet.getLong("count");
|
||||
// setCounts.put(setName, count);
|
||||
// }
|
||||
// } catch (SQLException ex) {
|
||||
// logger.log(Level.WARNING, "An error occurred while fetching set name counts.", ex);
|
||||
// }
|
||||
// });
|
||||
//
|
||||
// return setCounts;
|
||||
// } catch (NoCurrentCaseException | TskCoreException ex) {
|
||||
// throw new ExecutionException("An error occurred while fetching set counts", ex);
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// /**
|
||||
// * Get counts for individual sets of the provided type to be used in the
|
||||
// * tree view.
|
||||
// *
|
||||
// * @param type The blackboard artifact type.
|
||||
// * @param dataSourceId The data source object id for which the results
|
||||
// * should be filtered or null if no data source
|
||||
// * filtering.
|
||||
// * @param nullSetName For artifacts with no set, this is the name to
|
||||
// * provide. If null, artifacts without a set name will
|
||||
// * be ignored.
|
||||
// * @param converter Means of converting from data source id and set name
|
||||
// * to an AnalysisResultSetSearchParam
|
||||
// *
|
||||
// * @return The sets along with counts to display.
|
||||
// *
|
||||
// * @throws IllegalArgumentException
|
||||
// * @throws ExecutionException
|
||||
// */
|
||||
// private <T extends AnalysisResultSetSearchParam> TreeResultsDTO<T> getSetCounts(
|
||||
// BlackboardArtifact.Type type,
|
||||
// Long dataSourceId,
|
||||
// String nullSetName,
|
||||
// BiFunction<Long, String, T> converter) throws IllegalArgumentException, ExecutionException {
|
||||
//
|
||||
// List<TreeItemDTO<T>> allSets
|
||||
// = getSetCountsMap(type, BlackboardAttribute.Type.TSK_SET_NAME, dataSourceId).entrySet().stream()
|
||||
// .filter(entry -> nullSetName != null || entry.getKey() != null)
|
||||
// .map(entry -> {
|
||||
// return new TreeItemDTO<>(
|
||||
// type.getTypeName(),
|
||||
// converter.apply(dataSourceId, entry.getKey()),
|
||||
// entry.getKey(),
|
||||
// entry.getKey() == null ? nullSetName : entry.getKey(),
|
||||
// entry.getValue());
|
||||
// })
|
||||
// .sorted((a, b) -> a.getDisplayName().compareToIgnoreCase(b.getDisplayName()))
|
||||
// .collect(Collectors.toList());
|
||||
//
|
||||
// return new TreeResultsDTO<>(allSets);
|
||||
// }
|
||||
//
|
||||
// public TreeResultsDTO<HashHitSearchParam> getHashHitSetCounts(Long dataSourceId) throws IllegalArgumentException, ExecutionException {
|
||||
// return getSetCounts(BlackboardArtifact.Type.TSK_HASHSET_HIT, dataSourceId, null, (dsId, setName) -> new HashHitSearchParam(dsId, setName));
|
||||
// }
|
||||
//
|
||||
// public TreeResultsDTO<AnalysisResultSetSearchParam> getSetCounts(BlackboardArtifact.Type type, Long dataSourceId, String nullSetName) throws IllegalArgumentException, ExecutionException {
|
||||
// return getSetCounts(type, dataSourceId, nullSetName, (dsId, setName) -> new AnalysisResultSetSearchParam(type, dsId, setName));
|
||||
// }
|
||||
|
||||
|
||||
/**
|
||||
* Handles basic functionality of fetching and paging of analysis results.
|
||||
*/
|
||||
|
@@ -18,57 +18,14 @@
 */
package org.sleuthkit.autopsy.mainui.datamodel;

import java.util.Objects;
import org.sleuthkit.datamodel.BlackboardArtifact;

/**
 * Key for analysis result in order to retrieve data from DAO.
 */
public class AnalysisResultSearchParam {
    private final BlackboardArtifact.Type artifactType;
    private final Long dataSourceId;
public class AnalysisResultSearchParam extends BlackboardArtifactSearchParam {

    public AnalysisResultSearchParam(BlackboardArtifact.Type artifactType, Long dataSourceId) {
        this.artifactType = artifactType;
        this.dataSourceId = dataSourceId;
        super(artifactType, dataSourceId);
    }

    public BlackboardArtifact.Type getArtifactType() {
        return artifactType;
    }

    public Long getDataSourceId() {
        return dataSourceId;
    }

    @Override
    public int hashCode() {
        int hash = 7;
        hash = 79 * hash + Objects.hashCode(this.artifactType);
        hash = 79 * hash + Objects.hashCode(this.dataSourceId);
        return hash;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        final AnalysisResultSearchParam other = (AnalysisResultSearchParam) obj;
        if (!Objects.equals(this.artifactType, other.artifactType)) {
            return false;
        }
        if (!Objects.equals(this.dataSourceId, other.dataSourceId)) {
            return false;
        }
        return true;
    }


}

@@ -24,7 +24,7 @@ import org.sleuthkit.datamodel.BlackboardArtifact;
/**
 * Base class for search params for analysis results that filter by set name.
 */
abstract class AnalysisResultSetSearchParam extends AnalysisResultSearchParam {
public class AnalysisResultSetSearchParam extends AnalysisResultSearchParam {

    private final String setName;

@ -1,6 +1,7 @@
|
||||
package org.sleuthkit.autopsy.mainui.datamodel;
|
||||
|
||||
import com.google.common.collect.ImmutableSet;
|
||||
import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
@ -10,13 +11,23 @@ import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.logging.Level;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.python.google.common.collect.Sets;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.Blackboard;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_DOWNLOAD_SOURCE;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.Type.TSK_ASSOCIATED_OBJECT;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.Type.TSK_DATA_SOURCE_USAGE;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.Type.TSK_GEN_INFO;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.Type.TSK_TL_EVENT;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import org.sleuthkit.datamodel.Content;
|
||||
import org.sleuthkit.datamodel.SleuthkitCase;
|
||||
@ -62,6 +73,8 @@ import org.sleuthkit.datamodel.TskCoreException;
|
||||
})
|
||||
abstract class BlackboardArtifactDAO {
|
||||
|
||||
private static Logger logger = Logger.getLogger(BlackboardArtifactDAO.class.getName());
|
||||
|
||||
// GVDTODO there is a different standard for normal attr strings and email attr strings
|
||||
static final int STRING_LENGTH_MAX = 160;
|
||||
static final String ELLIPSIS = "...";
|
||||
@ -112,6 +125,31 @@ abstract class BlackboardArtifactDAO {
|
||||
Bundle.BlackboardArtifactDAO_columnKeys_dataSource_description()
|
||||
);
|
||||
|
||||
/**
|
||||
* Types that should not be shown in the tree.
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
private static final Set<BlackboardArtifact.Type> IGNORED_TYPES = Sets.newHashSet(
|
||||
// these are shown in other parts of the UI (and different node types)
|
||||
TSK_DATA_SOURCE_USAGE,
|
||||
TSK_GEN_INFO,
|
||||
new BlackboardArtifact.Type(TSK_DOWNLOAD_SOURCE),
|
||||
TSK_TL_EVENT,
|
||||
//This is not meant to be shown in the UI at all. It is more of a meta artifact.
|
||||
TSK_ASSOCIATED_OBJECT
|
||||
);
|
||||
|
||||
private static final String IGNORED_TYPES_SQL_SET = IGNORED_TYPES.stream()
|
||||
.map(tp -> Integer.toString(tp.getTypeID()))
|
||||
.collect(Collectors.joining(", "));
|
||||
|
||||
/**
|
||||
* @return The set of types that are not shown in the tree.
|
||||
*/
|
||||
protected static Set<BlackboardArtifact.Type> getIgnoredTreeTypes() {
|
||||
return IGNORED_TYPES;
|
||||
}
|
||||
|
||||
TableData createTableData(BlackboardArtifact.Type artType, List<BlackboardArtifact> arts) throws TskCoreException, NoCurrentCaseException {
|
||||
Map<Long, Map<BlackboardAttribute.Type, Object>> artifactAttributes = new HashMap<>();
|
||||
for (BlackboardArtifact art : arts) {
|
||||
@ -219,6 +257,40 @@ abstract class BlackboardArtifactDAO {
|
||||
.anyMatch(tp -> BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME.equals(tp.getValueType()));
|
||||
}
|
||||
|
||||
String getWhereClause(SearchParams<BlackboardArtifactSearchParam> cacheKey) {
|
||||
Long dataSourceId = cacheKey.getParamData().getDataSourceId();
|
||||
BlackboardArtifact.Type artType = cacheKey.getParamData().getArtifactType();
|
||||
|
||||
String originalWhereClause = " artifacts.artifact_type_id = " + artType.getTypeID() + " ";
|
||||
if (dataSourceId != null) {
|
||||
originalWhereClause += " AND artifacts.data_source_obj_id = " + dataSourceId + " ";
|
||||
}
|
||||
|
||||
String pagedWhereClause = originalWhereClause
|
||||
+ " ORDER BY artifacts.obj_id ASC"
|
||||
+ (cacheKey.getMaxResultsCount() != null && cacheKey.getMaxResultsCount() > 0 ? " LIMIT " + cacheKey.getMaxResultsCount() : "")
|
||||
+ (cacheKey.getStartItem() > 0 ? " OFFSET " + cacheKey.getStartItem() : "");
|
||||
return pagedWhereClause;
|
||||
}
|
||||
|
||||
long getTotalResultsCount(SearchParams<BlackboardArtifactSearchParam> cacheKey, long currentPageSize) throws TskCoreException, NoCurrentCaseException {
|
||||
Blackboard blackboard = getCase().getBlackboard();
|
||||
Long dataSourceId = cacheKey.getParamData().getDataSourceId();
|
||||
BlackboardArtifact.Type artType = cacheKey.getParamData().getArtifactType();
|
||||
|
||||
if ( (cacheKey.getStartItem() == 0) // offset is zero AND
|
||||
&& ( (cacheKey.getMaxResultsCount() != null && currentPageSize < cacheKey.getMaxResultsCount()) // number of results is less than max
|
||||
|| (cacheKey.getMaxResultsCount() == null)) ) { // OR max number of results was not specified
|
||||
return currentPageSize;
|
||||
} else {
|
||||
if (dataSourceId != null) {
|
||||
return blackboard.getArtifactsCount(artType.getTypeID(), dataSourceId);
|
||||
} else {
|
||||
return blackboard.getArtifactsCount(artType.getTypeID());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
String getDataSourceName(Content srcContent) throws TskCoreException {
|
||||
Content dataSource = srcContent.getDataSource();
|
||||
if (dataSource != null) {
|
||||
@ -328,4 +400,46 @@ abstract class BlackboardArtifactDAO {
|
||||
this.rows = rows;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the count of each artifact type in the category.
|
||||
*
|
||||
* @param category The artifact type category.
|
||||
* @param dataSourceId The data source object id for which the results
|
||||
* should be filtered or null if no data source
|
||||
* filtering.
|
||||
*
|
||||
* @return The mapping of type to count.
|
||||
*
|
||||
* @throws NoCurrentCaseException
|
||||
* @throws TskCoreException
|
||||
*/
|
||||
Map<BlackboardArtifact.Type, Long> getCounts(BlackboardArtifact.Category category, Long dataSourceId) throws NoCurrentCaseException, TskCoreException {
|
||||
|
||||
// get artifact types and counts
|
||||
SleuthkitCase skCase = getCase();
|
||||
String query = "artifact_type_id, COUNT(*) AS count "
|
||||
+ " FROM blackboard_artifacts "
|
||||
+ " WHERE artifact_type_id NOT IN (" + IGNORED_TYPES_SQL_SET + ") "
|
||||
+ " AND artifact_type_id IN "
|
||||
+ " (SELECT artifact_type_id FROM blackboard_artifact_types WHERE category_type = " + category.getID() + ")"
|
||||
+ (dataSourceId == null ? "" : (" AND data_source_obj_id = " + dataSourceId + " "))
|
||||
+ " GROUP BY artifact_type_id";
|
||||
Map<BlackboardArtifact.Type, Long> typeCounts = new HashMap<>();
|
||||
|
||||
skCase.getCaseDbAccessManager().select(query, (resultSet) -> {
|
||||
try {
|
||||
while (resultSet.next()) {
|
||||
int artifactTypeId = resultSet.getInt("artifact_type_id");
|
||||
BlackboardArtifact.Type type = skCase.getBlackboard().getArtifactType(artifactTypeId);
|
||||
long count = resultSet.getLong("count");
|
||||
typeCounts.put(type, count);
|
||||
}
|
||||
} catch (TskCoreException | SQLException ex) {
|
||||
logger.log(Level.WARNING, "An error occurred while fetching artifact type counts.", ex);
|
||||
}
|
||||
});
|
||||
|
||||
return typeCounts;
|
||||
}
|
||||
}
|
||||
|
@ -0,0 +1,72 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2021 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.mainui.datamodel;
|
||||
|
||||
import java.util.Objects;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
|
||||
/**
|
||||
* Key for data artifact in order to retrieve data from DAO.
|
||||
*/
|
||||
public class BlackboardArtifactSearchParam {
|
||||
private final BlackboardArtifact.Type artifactType;
|
||||
private final Long dataSourceId;
|
||||
|
||||
public BlackboardArtifactSearchParam(BlackboardArtifact.Type artifactType, Long dataSourceId) {
|
||||
this.artifactType = artifactType;
|
||||
this.dataSourceId = dataSourceId;
|
||||
}
|
||||
|
||||
public BlackboardArtifact.Type getArtifactType() {
|
||||
return artifactType;
|
||||
}
|
||||
|
||||
public Long getDataSourceId() {
|
||||
return dataSourceId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int hash = 7;
|
||||
hash = 67 * hash + Objects.hashCode(this.artifactType);
|
||||
hash = 67 * hash + Objects.hashCode(this.dataSourceId);
|
||||
return hash;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
if (obj == null) {
|
||||
return false;
|
||||
}
|
||||
if (getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
final BlackboardArtifactSearchParam other = (BlackboardArtifactSearchParam) obj;
|
||||
if (!Objects.equals(this.artifactType, other.artifactType)) {
|
||||
return false;
|
||||
}
|
||||
if (!Objects.equals(this.dataSourceId, other.dataSourceId)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
@ -28,12 +28,6 @@ BlackboardArtifactDAO.columnKeys.score.name=Score
|
||||
BlackboardArtifactDAO.columnKeys.srcFile.description=Source Name
|
||||
BlackboardArtifactDAO.columnKeys.srcFile.displayName=Source Name
|
||||
BlackboardArtifactDAO.columnKeys.srcFile.name=Source Name
|
||||
CountsRowResultDTO_columns_count_description=Name
|
||||
CountsRowResultDTO_columns_count_displayName=Name
|
||||
CountsRowResultDTO_columns_count_name=displayName
|
||||
CountsRowResultDTO_columns_displayName_description=Name
|
||||
CountsRowResultDTO_columns_displayName_displayName=Name
|
||||
CountsRowResultDTO_columns_displayName_name=displayName
|
||||
FileExtDocumentFilter_html_displayName=HTML
|
||||
FileExtDocumentFilter_office_displayName=Office
|
||||
FileExtDocumentFilter_pdf_displayName=PDF
|
||||
@ -85,4 +79,24 @@ FileSystemColumnUtils.volumeColumns.flags=Flags
|
||||
FileSystemColumnUtils.volumeColumns.id=ID
|
||||
FileSystemColumnUtils.volumeColumns.length=Length in Sectors
|
||||
FileSystemColumnUtils.volumeColumns.startingSector=Starting Sector
|
||||
FileTag.name.text=File Tag
|
||||
FileTypesByMimeType.name.text=By MIME Type
|
||||
ResultTag.name.text=Result Tag
|
||||
TagsDAO.fileColumns.accessTimeColLbl=Accessed Time
|
||||
TagsDAO.fileColumns.changeTimeColLbl=Changed Time
|
||||
TagsDAO.fileColumns.commentColLbl=Comment
|
||||
TagsDAO.fileColumns.createdTimeColLbl=Created Time
|
||||
TagsDAO.fileColumns.filePathColLbl=File Path
|
||||
TagsDAO.fileColumns.md5HashColLbl=MD5 Hash
|
||||
TagsDAO.fileColumns.modifiedTimeColLbl=Modified Time
|
||||
TagsDAO.fileColumns.nameColLbl=Name
|
||||
TagsDAO.fileColumns.noDescription=No Description
|
||||
TagsDAO.fileColumns.originalName=Original Name
|
||||
TagsDAO.fileColumns.sizeColLbl=Size
|
||||
TagsDAO.fileColumns.userNameColLbl=User Name
|
||||
TagsDAO.tagColumns.commentColLbl=Comment
|
||||
TagsDAO.tagColumns.origNameColLbl=Original Name
|
||||
TagsDAO.tagColumns.sourceNameColLbl=Source Name
|
||||
TagsDAO.tagColumns.sourcePathColLbl=Source File Path
|
||||
TagsDAO.tagColumns.typeColLbl=Result Type
|
||||
TagsDAO.tagColumns.userNameColLbl=User Name
|
||||
|
@ -1,92 +0,0 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2021 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.mainui.datamodel;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import org.openide.util.NbBundle.Messages;
|
||||
|
||||
/**
|
||||
*
|
||||
* A row result providing a category and a count for that category.
|
||||
*/
|
||||
@Messages({
|
||||
"CountsRowResultDTO_columns_displayName_name=displayName",
|
||||
"CountsRowResultDTO_columns_displayName_displayName=Name",
|
||||
"CountsRowResultDTO_columns_displayName_description=Name",
|
||||
"CountsRowResultDTO_columns_count_name=displayName",
|
||||
"CountsRowResultDTO_columns_count_displayName=Name",
|
||||
"CountsRowResultDTO_columns_count_description=Name"
|
||||
})
|
||||
public class CountsRowDTO implements RowDTO {
|
||||
|
||||
private static final String DEFAULT_TYPE_ID = "COUNTS";
|
||||
|
||||
public static ColumnKey DISPLAY_NAME_COL = new ColumnKey(
|
||||
Bundle.CountsRowResultDTO_columns_displayName_name(),
|
||||
Bundle.CountsRowResultDTO_columns_displayName_displayName(),
|
||||
Bundle.CountsRowResultDTO_columns_displayName_description());
|
||||
|
||||
public static ColumnKey COUNT_COL = new ColumnKey(
|
||||
Bundle.CountsRowResultDTO_columns_count_name(),
|
||||
Bundle.CountsRowResultDTO_columns_count_displayName(),
|
||||
Bundle.CountsRowResultDTO_columns_count_description());
|
||||
|
||||
private final long id;
|
||||
private final String displayName;
|
||||
private final long count;
|
||||
private final List<Object> cellValues;
|
||||
private final String typeId;
|
||||
|
||||
public CountsRowDTO(long id, String displayName, long count) {
|
||||
this(DEFAULT_TYPE_ID, id, displayName, count);
|
||||
}
|
||||
|
||||
public CountsRowDTO(String typeId, long id, String displayName, long count) {
|
||||
this.typeId = typeId;
|
||||
this.id = id;
|
||||
this.displayName = displayName;
|
||||
this.count = count;
|
||||
this.cellValues = ImmutableList.of(Arrays.asList(displayName, count));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public String getDisplayName() {
|
||||
return displayName;
|
||||
}
|
||||
|
||||
public long getCount() {
|
||||
return count;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Object> getCellValues() {
|
||||
return cellValues;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTypeId() {
|
||||
return typeId;
|
||||
}
|
||||
}
|
@ -23,7 +23,12 @@ import com.google.common.cache.CacheBuilder;
|
||||
import java.beans.PropertyChangeEvent;
|
||||
import java.text.MessageFormat;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
|
||||
@ -32,6 +37,7 @@ import org.sleuthkit.datamodel.Blackboard;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.Content;
|
||||
import org.sleuthkit.datamodel.DataArtifact;
|
||||
import org.sleuthkit.datamodel.SleuthkitCase;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
|
||||
/**
|
||||
@ -39,6 +45,8 @@ import org.sleuthkit.datamodel.TskCoreException;
|
||||
*/
|
||||
public class DataArtifactDAO extends BlackboardArtifactDAO {
|
||||
|
||||
private static Logger logger = Logger.getLogger(DataArtifactDAO.class.getName());
|
||||
|
||||
private static DataArtifactDAO instance = null;
|
||||
|
||||
synchronized static DataArtifactDAO getInstance() {
|
||||
@ -49,25 +57,31 @@ public class DataArtifactDAO extends BlackboardArtifactDAO {
|
||||
return instance;
|
||||
}
|
||||
|
||||
private final Cache<SearchParams<DataArtifactSearchParam>, DataArtifactTableSearchResultsDTO> dataArtifactCache = CacheBuilder.newBuilder().maximumSize(1000).build();
|
||||
|
||||
private DataArtifactTableSearchResultsDTO fetchDataArtifactsForTable(SearchParams<DataArtifactSearchParam> cacheKey) throws NoCurrentCaseException, TskCoreException {
|
||||
Blackboard blackboard = getCase().getBlackboard();
|
||||
|
||||
Long dataSourceId = cacheKey.getParamData().getDataSourceId();
|
||||
BlackboardArtifact.Type artType = cacheKey.getParamData().getArtifactType();
|
||||
|
||||
// get analysis results
|
||||
List<BlackboardArtifact> arts = new ArrayList<>();
|
||||
if (dataSourceId != null) {
|
||||
arts.addAll(blackboard.getDataArtifacts(artType.getTypeID(), dataSourceId));
|
||||
} else {
|
||||
arts.addAll(blackboard.getDataArtifacts(artType.getTypeID()));
|
||||
/**
|
||||
* @return The set of types that are not shown in the tree.
|
||||
*/
|
||||
public static Set<BlackboardArtifact.Type> getIgnoredTreeTypes() {
|
||||
return BlackboardArtifactDAO.getIgnoredTreeTypes();
|
||||
}
|
||||
|
||||
List<BlackboardArtifact> pagedArtifacts = getPaged(arts, cacheKey);
|
||||
TableData tableData = createTableData(artType, pagedArtifacts);
|
||||
return new DataArtifactTableSearchResultsDTO(artType, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), arts.size());
|
||||
private final Cache<SearchParams<BlackboardArtifactSearchParam>, DataArtifactTableSearchResultsDTO> dataArtifactCache = CacheBuilder.newBuilder().maximumSize(1000).build();
|
||||
|
||||
private DataArtifactTableSearchResultsDTO fetchDataArtifactsForTable(SearchParams<BlackboardArtifactSearchParam> cacheKey) throws NoCurrentCaseException, TskCoreException {
|
||||
|
||||
SleuthkitCase skCase = getCase();
|
||||
Blackboard blackboard = skCase.getBlackboard();
|
||||
BlackboardArtifact.Type artType = cacheKey.getParamData().getArtifactType();
|
||||
|
||||
String pagedWhereClause = getWhereClause(cacheKey);
|
||||
|
||||
List<BlackboardArtifact> arts = new ArrayList<>();
|
||||
arts.addAll(blackboard.getDataArtifactsWhere(pagedWhereClause));
|
||||
blackboard.loadBlackboardAttributes(arts);
|
||||
|
||||
long totalResultsCount = getTotalResultsCount(cacheKey, arts.size());
|
||||
|
||||
TableData tableData = createTableData(artType, arts);
|
||||
return new DataArtifactTableSearchResultsDTO(artType, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), totalResultsCount);
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -88,7 +102,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO {
|
||||
+ "Received artifact type: {0}; data source id: {1}", artType, artifactKey.getDataSourceId() == null ? "<null>" : artifactKey.getDataSourceId()));
|
||||
}
|
||||
|
||||
SearchParams<DataArtifactSearchParam> searchParams = new SearchParams<>(artifactKey, startItem, maxCount);
|
||||
SearchParams<BlackboardArtifactSearchParam> searchParams = new SearchParams<>(artifactKey, startItem, maxCount);
|
||||
if (hardRefresh) {
|
||||
this.dataArtifactCache.invalidate(searchParams);
|
||||
}
|
||||
@ -105,12 +119,49 @@ public class DataArtifactDAO extends BlackboardArtifactDAO {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a search results dto containing rows of counts data.
|
||||
*
|
||||
* @param dataSourceId The data source object id for which the results
|
||||
* should be filtered or null if no data source
|
||||
* filtering.
|
||||
*
|
||||
* @return The results where rows are CountsRowDTO of
|
||||
* DataArtifactSearchParam.
|
||||
*
|
||||
* @throws ExecutionException
|
||||
*/
|
||||
public TreeResultsDTO<DataArtifactSearchParam> getDataArtifactCounts(Long dataSourceId) throws ExecutionException {
|
||||
try {
|
||||
// get row dto's sorted by display name
|
||||
Map<BlackboardArtifact.Type, Long> typeCounts = getCounts(BlackboardArtifact.Category.DATA_ARTIFACT, dataSourceId);
|
||||
List<TreeResultsDTO.TreeItemDTO<DataArtifactSearchParam>> treeItemRows = typeCounts.entrySet().stream()
|
||||
.map(entry -> {
|
||||
return new TreeResultsDTO.TreeItemDTO<>(
|
||||
BlackboardArtifact.Category.DATA_ARTIFACT.name(),
|
||||
new DataArtifactSearchParam(entry.getKey(), dataSourceId),
|
||||
entry.getKey().getTypeID(),
|
||||
entry.getKey().getDisplayName(),
|
||||
entry.getValue());
|
||||
})
|
||||
.sorted(Comparator.comparing(countRow -> countRow.getDisplayName()))
|
||||
.collect(Collectors.toList());
|
||||
|
||||
// return results
|
||||
return new TreeResultsDTO<>(treeItemRows);
|
||||
|
||||
} catch (NoCurrentCaseException | TskCoreException ex) {
|
||||
throw new ExecutionException("An error occurred while fetching data artifact counts.", ex);
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* Handles fetching and paging of data artifacts.
|
||||
*/
|
||||
public static class DataArtifactFetcher extends DAOFetcher<DataArtifactSearchParam> {
|
||||
|
||||
/**
|
||||
* Main constructor.
|
||||
*
|
||||
* @param params Parameters to handle fetching of data.
|
||||
*/
|
||||
public DataArtifactFetcher(DataArtifactSearchParam params) {
|
||||
|
@ -18,58 +18,14 @@
|
||||
*/
|
||||
package org.sleuthkit.autopsy.mainui.datamodel;
|
||||
|
||||
import java.util.Objects;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
|
||||
/**
|
||||
* Key for data artifact in order to retrieve data from DAO.
|
||||
*/
|
||||
public class DataArtifactSearchParam {
|
||||
private final BlackboardArtifact.Type artifactType;
|
||||
private final Long dataSourceId;
|
||||
public class DataArtifactSearchParam extends BlackboardArtifactSearchParam {
|
||||
|
||||
public DataArtifactSearchParam(BlackboardArtifact.Type artifactType, Long dataSourceId) {
|
||||
this.artifactType = artifactType;
|
||||
this.dataSourceId = dataSourceId;
|
||||
super (artifactType, dataSourceId);
|
||||
}
|
||||
|
||||
public BlackboardArtifact.Type getArtifactType() {
|
||||
return artifactType;
|
||||
}
|
||||
|
||||
public Long getDataSourceId() {
|
||||
return dataSourceId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int hash = 7;
|
||||
hash = 67 * hash + Objects.hashCode(this.artifactType);
|
||||
hash = 67 * hash + Objects.hashCode(this.dataSourceId);
|
||||
return hash;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
if (obj == null) {
|
||||
return false;
|
||||
}
|
||||
if (getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
final DataArtifactSearchParam other = (DataArtifactSearchParam) obj;
|
||||
if (!Objects.equals(this.artifactType, other.artifactType)) {
|
||||
return false;
|
||||
}
|
||||
if (!Objects.equals(this.dataSourceId, other.dataSourceId)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
@@ -38,6 +38,7 @@ public class MainDAO {
    private final AnalysisResultDAO analysisResultDAO = AnalysisResultDAO.getInstance();
    private final ViewsDAO viewsDAO = ViewsDAO.getInstance();
    private final FileSystemDAO fileSystemDAO = FileSystemDAO.getInstance();
    private final TagsDAO tagsDAO = TagsDAO.getInstance();

    public DataArtifactDAO getDataArtifactsDAO() {
        return dataArtifactDAO;
@@ -54,4 +55,8 @@ public class MainDAO {
    public FileSystemDAO getFileSystemDAO() {
        return fileSystemDAO;
    }

    public TagsDAO getTagsDAO() {
        return tagsDAO;
    }
}

Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java (Executable file, 371 lines)
@ -0,0 +1,371 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2021 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.mainui.datamodel;
|
||||
|
||||
import com.google.common.cache.Cache;
|
||||
import com.google.common.cache.CacheBuilder;
|
||||
import java.beans.PropertyChangeEvent;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.openide.util.NbBundle.Messages;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagAddedEvent;
|
||||
import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagDeletedEvent;
|
||||
import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent;
|
||||
import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent;
|
||||
import org.sleuthkit.autopsy.core.UserPreferences;
|
||||
import org.sleuthkit.autopsy.coreutils.TimeZoneUtils;
|
||||
import org.sleuthkit.autopsy.events.AutopsyEvent;
|
||||
import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifactTag;
|
||||
import org.sleuthkit.datamodel.Content;
|
||||
import org.sleuthkit.datamodel.ContentTag;
|
||||
import org.sleuthkit.datamodel.Tag;
|
||||
import org.sleuthkit.datamodel.TagName;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
|
||||
/**
|
||||
* Provides information to populate the results viewer for data in the allTags
|
||||
section.
|
||||
*/
|
||||
@Messages({"TagsDAO.fileColumns.nameColLbl=Name",
|
||||
"TagsDAO.fileColumns.originalName=Original Name",
|
||||
"TagsDAO.fileColumns.filePathColLbl=File Path",
|
||||
"TagsDAO.fileColumns.commentColLbl=Comment",
|
||||
"TagsDAO.fileColumns.modifiedTimeColLbl=Modified Time",
|
||||
"TagsDAO.fileColumns.changeTimeColLbl=Changed Time",
|
||||
"TagsDAO.fileColumns.accessTimeColLbl=Accessed Time",
|
||||
"TagsDAO.fileColumns.createdTimeColLbl=Created Time",
|
||||
"TagsDAO.fileColumns.sizeColLbl=Size",
|
||||
"TagsDAO.fileColumns.md5HashColLbl=MD5 Hash",
|
||||
"TagsDAO.fileColumns.userNameColLbl=User Name",
|
||||
"TagsDAO.fileColumns.noDescription=No Description",
|
||||
"TagsDAO.tagColumns.sourceNameColLbl=Source Name",
|
||||
"TagsDAO.tagColumns.origNameColLbl=Original Name",
|
||||
"TagsDAO.tagColumns.sourcePathColLbl=Source File Path",
|
||||
"TagsDAO.tagColumns.typeColLbl=Result Type",
|
||||
"TagsDAO.tagColumns.commentColLbl=Comment",
|
||||
"TagsDAO.tagColumns.userNameColLbl=User Name"})
|
||||
public class TagsDAO {
|
||||
|
||||
private static final int CACHE_SIZE = 5; // rule of thumb: 5 entries times number of cached SearchParams sub-types
|
||||
private static final long CACHE_DURATION = 2;
|
||||
private static final TimeUnit CACHE_DURATION_UNITS = TimeUnit.MINUTES;
|
||||
private final Cache<SearchParams<?>, SearchResultsDTO> searchParamsCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).expireAfterAccess(CACHE_DURATION, CACHE_DURATION_UNITS).build();
|
||||
|
||||
private static final String USER_NAME_PROPERTY = "user.name"; //NON-NLS
|
||||
|
||||
private static final String FILE_TAG_TYPE_ID = "FILE_TAG";
|
||||
private static final String RESULT_TAG_TYPE_ID = "RESULT_TAG";
|
||||
|
||||
private static final List<ColumnKey> FILE_TAG_COLUMNS = Arrays.asList(
|
||||
getFileColumnKey(Bundle.TagsDAO_fileColumns_nameColLbl()),
|
||||
getFileColumnKey(Bundle.TagsDAO_fileColumns_originalName()), // GVDTODO handle translation
|
||||
getFileColumnKey(Bundle.TagsDAO_fileColumns_filePathColLbl()),
|
||||
getFileColumnKey(Bundle.TagsDAO_fileColumns_commentColLbl()),
|
||||
getFileColumnKey(Bundle.TagsDAO_fileColumns_modifiedTimeColLbl()),
|
||||
getFileColumnKey(Bundle.TagsDAO_fileColumns_changeTimeColLbl()),
|
||||
getFileColumnKey(Bundle.TagsDAO_fileColumns_accessTimeColLbl()),
|
||||
getFileColumnKey(Bundle.TagsDAO_fileColumns_createdTimeColLbl()),
|
||||
getFileColumnKey(Bundle.TagsDAO_fileColumns_sizeColLbl()),
|
||||
getFileColumnKey(Bundle.TagsDAO_fileColumns_md5HashColLbl()),
|
||||
getFileColumnKey(Bundle.TagsDAO_fileColumns_userNameColLbl()));
|
||||
|
||||
private static final List<ColumnKey> RESULT_TAG_COLUMNS = Arrays.asList(
|
||||
getFileColumnKey(Bundle.TagsDAO_tagColumns_sourceNameColLbl()),
|
||||
getFileColumnKey(Bundle.TagsDAO_tagColumns_origNameColLbl()),
|
||||
getFileColumnKey(Bundle.TagsDAO_tagColumns_sourcePathColLbl()),
|
||||
getFileColumnKey(Bundle.TagsDAO_tagColumns_typeColLbl()),
|
||||
getFileColumnKey(Bundle.TagsDAO_tagColumns_commentColLbl()),
|
||||
getFileColumnKey(Bundle.TagsDAO_tagColumns_userNameColLbl()));
|
||||
|
||||
private static TagsDAO instance = null;
|
||||
|
||||
synchronized static TagsDAO getInstance() {
|
||||
if (instance == null) {
|
||||
instance = new TagsDAO();
|
||||
}
|
||||
|
||||
return instance;
|
||||
}
|
||||
|
||||
private static ColumnKey getFileColumnKey(String name) {
|
||||
return new ColumnKey(name, name, Bundle.TagsDAO_fileColumns_noDescription());
|
||||
}
|
||||
|
||||
public SearchResultsDTO getTags(TagsSearchParams key, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException {
|
||||
if (key.getTagName() == null) {
|
||||
throw new IllegalArgumentException("Must have non-null tag name");
|
||||
} else if (key.getDataSourceId() != null && key.getDataSourceId() <= 0) {
|
||||
throw new IllegalArgumentException("Data source id must be greater than 0 or null");
|
||||
} else if (key.getTagType() == null) {
|
||||
throw new IllegalArgumentException("Must have non-null tag type");
|
||||
}
|
||||
|
||||
SearchParams<TagsSearchParams> searchParams = new SearchParams<>(key, startItem, maxCount);
|
||||
if (hardRefresh) {
|
||||
this.searchParamsCache.invalidate(searchParams);
|
||||
}
|
||||
|
||||
return searchParamsCache.get(searchParams, () -> fetchTagsDTOs(searchParams));
|
||||
}
|
||||
|
||||
@NbBundle.Messages({"FileTag.name.text=File Tag",
|
||||
"ResultTag.name.text=Result Tag"})
|
||||
private SearchResultsDTO fetchTagsDTOs(SearchParams<TagsSearchParams> cacheKey) throws NoCurrentCaseException, TskCoreException {
|
||||
switch (cacheKey.getParamData().getTagType()) {
|
||||
case FILE:
|
||||
return fetchFileTags(cacheKey);
|
||||
case RESULT:
|
||||
return fetchResultTags(cacheKey);
|
||||
default:
|
||||
throw new IllegalArgumentException("Unsupported tag type");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a list of paged tag results.
|
||||
*
|
||||
* @param tags The tag results.
|
||||
* @param searchParams The search parameters including the paging.
|
||||
*
|
||||
* @return The list of paged tag results.
|
||||
*/
|
||||
List<? extends Tag> getPaged(List<? extends Tag> tags, SearchParams<?> searchParams) {
|
||||
Stream<? extends Tag> pagedTagsStream = tags.stream()
|
||||
.sorted(Comparator.comparing((tag) -> tag.getId()))
|
||||
.skip(searchParams.getStartItem());
|
||||
|
||||
if (searchParams.getMaxResultsCount() != null) {
|
||||
pagedTagsStream = pagedTagsStream.limit(searchParams.getMaxResultsCount());
|
||||
}
|
||||
|
||||
return pagedTagsStream.collect(Collectors.toList());
|
||||
}
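Worked example of the skip/limit rule above (illustration only, not part of this patch; "params" stands for any TagsSearchParams and the numbers are arbitrary):

    // startItem = 100, maxCount = 50  ->  the tags at id-sorted positions 100..149,
    // or fewer if the list is shorter; a null maxCount returns everything after the skip.
    List<? extends Tag> page = getPaged(allTags, new SearchParams<>(params, 100, 50L));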
|
||||
|
||||
private SearchResultsDTO fetchResultTags(SearchParams<TagsSearchParams> cacheKey) throws NoCurrentCaseException, TskCoreException {
|
||||
|
||||
Long dataSourceId = cacheKey.getParamData().getDataSourceId();
|
||||
TagName tagName = cacheKey.getParamData().getTagName();
|
||||
|
||||
// get all tag results
|
||||
List<BlackboardArtifactTag> allTags = new ArrayList<>();
|
||||
List<BlackboardArtifactTag> artifactTags = (dataSourceId != null && dataSourceId > 0)
|
||||
? Case.getCurrentCaseThrows().getServices().getTagsManager().getBlackboardArtifactTagsByTagName(tagName, dataSourceId)
|
||||
: Case.getCurrentCaseThrows().getServices().getTagsManager().getBlackboardArtifactTagsByTagName(tagName);
|
||||
if (UserPreferences.showOnlyCurrentUserTags()) {
|
||||
String userName = System.getProperty(USER_NAME_PROPERTY);
|
||||
for (BlackboardArtifactTag tag : artifactTags) {
|
||||
if (userName.equals(tag.getUserName())) {
|
||||
allTags.add(tag);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
allTags.addAll(artifactTags);
|
||||
}
|
||||
|
||||
// get current page of tag results
|
||||
List<? extends Tag> pagedTags = getPaged(allTags, cacheKey);
|
||||
|
||||
List<RowDTO> fileRows = new ArrayList<>();
|
||||
for (Tag tag : pagedTags) {
|
||||
BlackboardArtifactTag blackboardTag = (BlackboardArtifactTag) tag;
|
||||
|
||||
String name = blackboardTag.getContent().getName(); // As a backup.
|
||||
try {
|
||||
name = blackboardTag.getArtifact().getShortDescription();
|
||||
} catch (TskCoreException ignore) {
|
||||
// it's a WARNING, skip
|
||||
}
|
||||
|
||||
String contentPath;
|
||||
try {
|
||||
contentPath = blackboardTag.getContent().getUniquePath();
|
||||
} catch (TskCoreException ex) {
|
||||
contentPath = NbBundle.getMessage(this.getClass(), "BlackboardArtifactTagNode.createSheet.unavail.text");
|
||||
}
|
||||
|
||||
List<Object> cellValues = Arrays.asList(name,
|
||||
null, // GVDTODO translation column
|
||||
contentPath,
|
||||
blackboardTag.getArtifact().getDisplayName(),
|
||||
blackboardTag.getComment(),
|
||||
blackboardTag.getUserName());
|
||||
|
||||
fileRows.add(new BaseRowDTO(
|
||||
cellValues,
|
||||
RESULT_TAG_TYPE_ID,
|
||||
blackboardTag.getId()));
|
||||
}
|
||||
|
||||
return new BaseSearchResultsDTO(RESULT_TAG_TYPE_ID, Bundle.ResultTag_name_text(), RESULT_TAG_COLUMNS, fileRows, 0, allTags.size());
|
||||
}
|
||||
|
||||
private SearchResultsDTO fetchFileTags(SearchParams<TagsSearchParams> cacheKey) throws NoCurrentCaseException, TskCoreException {
|
||||
|
||||
Long dataSourceId = cacheKey.getParamData().getDataSourceId();
|
||||
TagName tagName = cacheKey.getParamData().getTagName();
|
||||
|
||||
// get all tag results
|
||||
List<ContentTag> allTags = new ArrayList<>();
|
||||
List<ContentTag> contentTags = (dataSourceId != null && dataSourceId > 0)
|
||||
? Case.getCurrentCaseThrows().getServices().getTagsManager().getContentTagsByTagName(tagName, dataSourceId)
|
||||
: Case.getCurrentCaseThrows().getServices().getTagsManager().getContentTagsByTagName(tagName);
|
||||
if (UserPreferences.showOnlyCurrentUserTags()) {
|
||||
String userName = System.getProperty(USER_NAME_PROPERTY);
|
||||
for (ContentTag tag : contentTags) {
|
||||
if (userName.equals(tag.getUserName())) {
|
||||
allTags.add(tag);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
allTags.addAll(contentTags);
|
||||
}
|
||||
|
||||
// get current page of tag results
|
||||
List<? extends Tag> pagedTags = getPaged(allTags, cacheKey);
|
||||
|
||||
List<RowDTO> fileRows = new ArrayList<>();
|
||||
for (Tag tag : pagedTags) {
|
||||
ContentTag contentTag = (ContentTag) tag;
|
||||
Content content = contentTag.getContent();
|
||||
String contentPath = content.getUniquePath();
|
||||
AbstractFile file = content instanceof AbstractFile ? (AbstractFile) content : null;
|
||||
|
||||
List<Object> cellValues = Arrays.asList(
|
||||
content.getName(),
|
||||
null, // GVDTODO translation column
|
||||
contentPath,
|
||||
contentTag.getComment(),
|
||||
file != null ? TimeZoneUtils.getFormattedTime(file.getMtime()) : "",
|
||||
file != null ? TimeZoneUtils.getFormattedTime(file.getCtime()) : "",
|
||||
file != null ? TimeZoneUtils.getFormattedTime(file.getAtime()) : "",
|
||||
file != null ? TimeZoneUtils.getFormattedTime(file.getCrtime()) : "",
|
||||
content.getSize(),
|
||||
file != null ? StringUtils.defaultString(file.getMd5Hash()) : "",
|
||||
contentTag.getUserName());
|
||||
|
||||
fileRows.add(new BaseRowDTO(
|
||||
cellValues,
|
||||
FILE_TAG_TYPE_ID,
|
||||
content.getId())); // 'file' may be null when the tagged content is not an AbstractFile; use the content's object id
|
||||
}
|
||||
|
||||
return new BaseSearchResultsDTO(FILE_TAG_TYPE_ID, Bundle.FileTag_name_text(), FILE_TAG_COLUMNS, fileRows, 0, allTags.size());
|
||||
}
|
||||
|
||||
/**
 * Handles fetching and paging of data for tags.
 */
|
||||
public static class TagFetcher extends DAOFetcher<TagsSearchParams> {
|
||||
|
||||
/**
|
||||
* Main constructor.
|
||||
*
|
||||
* @param params Parameters to handle fetching of data.
|
||||
*/
|
||||
public TagFetcher(TagsSearchParams params) {
|
||||
super(params);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException {
|
||||
return MainDAO.getInstance().getTagsDAO().getTags(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isRefreshRequired(PropertyChangeEvent evt) {
|
||||
TagsSearchParams params = this.getParameters();
|
||||
String eventType = evt.getPropertyName();
|
||||
|
||||
// handle artifact/result tag changes
|
||||
if (eventType.equals(Case.Events.BLACKBOARD_ARTIFACT_TAG_ADDED.toString())
|
||||
|| eventType.equals(Case.Events.BLACKBOARD_ARTIFACT_TAG_DELETED.toString())) {
|
||||
|
||||
// ignore non-artifact/result tag changes
|
||||
if (params.getTagType() != TagsSearchParams.TagType.RESULT) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (evt instanceof AutopsyEvent) {
|
||||
if (evt instanceof BlackBoardArtifactTagAddedEvent) {
|
||||
// An artifact associated with the current case has been tagged.
|
||||
BlackBoardArtifactTagAddedEvent event = (BlackBoardArtifactTagAddedEvent) evt;
|
||||
// ensure tag added event has a valid content id
|
||||
if (event.getAddedTag() == null || event.getAddedTag().getContent() == null || event.getAddedTag().getArtifact() == null) {
|
||||
return false;
|
||||
}
|
||||
return params.getTagName().getId() == event.getAddedTag().getName().getId(); // compare tag name ids, not the tag instance id
|
||||
} else if (evt instanceof BlackBoardArtifactTagDeletedEvent) {
|
||||
// A tag has been removed from an artifact associated with the current case.
|
||||
BlackBoardArtifactTagDeletedEvent event = (BlackBoardArtifactTagDeletedEvent) evt;
|
||||
// ensure tag deleted event has a valid content id
|
||||
BlackBoardArtifactTagDeletedEvent.DeletedBlackboardArtifactTagInfo deletedTagInfo = event.getDeletedTagInfo();
|
||||
if (deletedTagInfo == null) {
|
||||
return false;
|
||||
}
|
||||
return params.getTagName().getId() == deletedTagInfo.getTagID();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// handle file/content tag changes
|
||||
if (eventType.equals(Case.Events.CONTENT_TAG_ADDED.toString())
|
||||
|| eventType.equals(Case.Events.CONTENT_TAG_DELETED.toString())) {
|
||||
|
||||
// ignore non-file/content tag changes
|
||||
if (params.getTagType() != TagsSearchParams.TagType.FILE) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (evt instanceof AutopsyEvent) {
|
||||
if (evt instanceof ContentTagAddedEvent) {
|
||||
// Content associated with the current case has been tagged.
|
||||
ContentTagAddedEvent event = (ContentTagAddedEvent) evt;
|
||||
// ensure tag added event has a valid content id
|
||||
if (event.getAddedTag() == null || event.getAddedTag().getContent() == null) {
|
||||
return false;
|
||||
}
|
||||
return params.getTagName().getId() == event.getAddedTag().getName().getId(); // compare tag name ids, not the tag instance id
|
||||
} else if (evt instanceof ContentTagDeletedEvent) {
|
||||
// A tag has been removed from content associated with the current case.
|
||||
ContentTagDeletedEvent event = (ContentTagDeletedEvent) evt;
|
||||
// ensure tag deleted event has a valid content id
|
||||
ContentTagDeletedEvent.DeletedContentTagInfo deletedTagInfo = event.getDeletedTagInfo();
|
||||
if (deletedTagInfo == null) {
|
||||
return false;
|
||||
}
|
||||
return params.getTagName().getId() == deletedTagInfo.getTagID();
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
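Sketch of how a results panel might drive TagFetcher (illustration only, not part of this patch; "tagName" stands for an existing TagName, the paging values are arbitrary, and the calls would sit in code that handles ExecutionException):

    TagsSearchParams params = new TagsSearchParams(tagName, TagsSearchParams.TagType.FILE, null); // all data sources
    TagsDAO.TagFetcher fetcher = new TagsDAO.TagFetcher(params);
    SearchResultsDTO firstPage = fetcher.getSearchResults(50, 0, false); // page size 50, first page, cached if available
    // On an incoming Autopsy event, refetch only when it affects this tag name and type:
    // if (fetcher.isRefreshRequired(evt)) { fetcher.getSearchResults(50, 0, true); }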
|
Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsSearchParams.java
@ -0,0 +1,89 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2021 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.mainui.datamodel;
|
||||
|
||||
import java.util.Objects;
|
||||
import org.sleuthkit.datamodel.TagName;
|
||||
|
||||
/**
|
||||
* Key for accessing data about tags from the DAO.
|
||||
*/
|
||||
public class TagsSearchParams {
|
||||
|
||||
public enum TagType {
|
||||
FILE,
|
||||
RESULT;
|
||||
}
|
||||
|
||||
private final TagType type;
|
||||
private final TagName tagName;
|
||||
private final Long dataSourceId;
|
||||
|
||||
public TagsSearchParams(TagName tagName, TagType type, Long dataSourceId) {
|
||||
this.tagName = tagName;
|
||||
this.type = type;
|
||||
this.dataSourceId = dataSourceId;
|
||||
}
|
||||
|
||||
public TagName getTagName() {
|
||||
return tagName;
|
||||
}
|
||||
|
||||
public TagType getTagType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
public Long getDataSourceId() {
|
||||
return dataSourceId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int hash = 7;
|
||||
hash = 23 * hash + Objects.hashCode(this.tagName);
|
||||
hash = 23 * hash + Objects.hashCode(this.type);
|
||||
hash = 23 * hash + Objects.hashCode(this.dataSourceId);
|
||||
return hash;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
if (obj == null) {
|
||||
return false;
|
||||
}
|
||||
if (getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
final TagsSearchParams other = (TagsSearchParams) obj;
|
||||
if (!Objects.equals(this.tagName, other.tagName)) {
|
||||
return false;
|
||||
}
|
||||
if (!Objects.equals(this.dataSourceId, other.dataSourceId)) {
|
||||
return false;
|
||||
}
|
||||
if (!Objects.equals(this.type, other.type)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
}
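Since equals() and hashCode() cover the tag name, type and data source id, two identical requests map to the same TagsDAO cache entry (illustration only, not part of this patch; "tagName" stands for an existing TagName):

    TagsSearchParams a = new TagsSearchParams(tagName, TagsSearchParams.TagType.RESULT, 1L);
    TagsSearchParams b = new TagsSearchParams(tagName, TagsSearchParams.TagType.RESULT, 1L);
    // a.equals(b) and a.hashCode() == b.hashCode(), so a repeated search reuses the cached SearchResultsDTO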
|
Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java
@ -0,0 +1,120 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2021 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.mainui.datamodel;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* A list of items to display in the tree.
|
||||
*/
|
||||
public class TreeResultsDTO<T> {
|
||||
|
||||
private final List<TreeItemDTO<T>> items;
|
||||
|
||||
/**
|
||||
* Main constructor.
|
||||
*
|
||||
* @param items The items to display.
|
||||
*/
|
||||
public TreeResultsDTO(List<TreeItemDTO<T>> items) {
|
||||
this.items = items;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return The items to display.
|
||||
*/
|
||||
public List<TreeItemDTO<T>> getItems() {
|
||||
return items;
|
||||
}
|
||||
|
||||
/**
|
||||
* A result providing a category and a count for that category. Equals and
|
||||
* hashCode are based on id, type id, and type data.
|
||||
*/
|
||||
public static class TreeItemDTO<T> {
|
||||
|
||||
private final String displayName;
|
||||
private final String typeId;
|
||||
private final Long count;
|
||||
private final T typeData;
|
||||
private final Object id;
|
||||
|
||||
/**
|
||||
* Main constructor.
|
||||
*
|
||||
* @param typeId The id of this item type.
|
||||
* @param typeData Data for this particular row's type (i.e.
|
||||
* BlackboardArtifact.Type for counts of a particular
|
||||
* artifact type).
|
||||
* @param id The id of this row. Can be any object that
|
||||
* implements equals and hashCode.
|
||||
* @param displayName The display name of this row.
|
||||
* @param count The count of results for this row or null if not
|
||||
* applicable.
|
||||
*/
|
||||
public TreeItemDTO(String typeId, T typeData, Object id, String displayName, Long count) {
|
||||
this.typeId = typeId;
|
||||
this.id = id;
|
||||
this.displayName = displayName;
|
||||
this.count = count;
|
||||
this.typeData = typeData;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return The display name of this row.
|
||||
*/
|
||||
public String getDisplayName() {
|
||||
return displayName;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return The count of results for this row or null if not applicable.
|
||||
*/
|
||||
public Long getCount() {
|
||||
return count;
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @return Data for this particular row's type (i.e.
|
||||
* BlackboardArtifact.Type for counts of a particular artifact
|
||||
* type).
|
||||
*/
|
||||
public T getTypeData() {
|
||||
return typeData;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return The id of this row. Can be any object that implements equals
|
||||
* and hashCode.
|
||||
*/
|
||||
public Object getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return The id of this item type.
|
||||
*/
|
||||
public String getTypeId() {
|
||||
return typeId;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
}
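For example, a single artifact-type row could be built like this (illustration only, not part of this patch; assumes imports for java.util.Arrays and org.sleuthkit.datamodel.BlackboardArtifact, and the count of 125 is arbitrary):

    TreeResultsDTO.TreeItemDTO<BlackboardArtifact.Type> bookmarks = new TreeResultsDTO.TreeItemDTO<>(
            BlackboardArtifact.Type.TSK_WEB_BOOKMARK.getTypeName(),    // typeId
            BlackboardArtifact.Type.TSK_WEB_BOOKMARK,                  // typeData
            BlackboardArtifact.Type.TSK_WEB_BOOKMARK.getTypeID(),      // id (anything with equals/hashCode)
            BlackboardArtifact.Type.TSK_WEB_BOOKMARK.getDisplayName(), // displayName
            125L);                                                     // count
    TreeResultsDTO<BlackboardArtifact.Type> results = new TreeResultsDTO<>(Arrays.asList(bookmarks));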
|
Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultTypeFactory.java
@ -0,0 +1,365 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2021 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.mainui.nodes;
|
||||
|
||||
import com.google.common.collect.ImmutableSet;
|
||||
import java.beans.PropertyChangeEvent;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent;
|
||||
import org.sleuthkit.autopsy.datamodel.utils.IconsUtil;
|
||||
import org.sleuthkit.autopsy.ingest.IngestManager;
|
||||
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
|
||||
import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultDAO;
|
||||
import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSearchParam;
|
||||
import org.sleuthkit.autopsy.mainui.datamodel.MainDAO;
|
||||
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact.Category;
|
||||
|
||||
/**
|
||||
* Factory for displaying analysis result types in the tree.
|
||||
*/
|
||||
public class AnalysisResultTypeFactory extends TreeChildFactory<AnalysisResultSearchParam> {
|
||||
|
||||
private static Set<Integer> SET_TREE_ARTIFACTS = ImmutableSet.of(
|
||||
BlackboardArtifact.Type.TSK_HASHSET_HIT.getTypeID(),
|
||||
BlackboardArtifact.Type.TSK_INTERESTING_ARTIFACT_HIT.getTypeID(),
|
||||
BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT.getTypeID()
|
||||
);
|
||||
|
||||
/**
|
||||
* Returns the path to the icon to use for this artifact type.
|
||||
*
|
||||
* @param artType The artifact type.
|
||||
*
|
||||
* @return The path to the icon to use for this artifact type.
|
||||
*/
|
||||
private static String getIconPath(BlackboardArtifact.Type artType) {
|
||||
String iconPath = IconsUtil.getIconFilePath(artType.getTypeID());
|
||||
return iconPath != null && iconPath.charAt(0) == '/' ? iconPath.substring(1) : iconPath;
|
||||
}
|
||||
|
||||
private final Long dataSourceId;
|
||||
|
||||
/**
|
||||
* Main constructor.
|
||||
*
|
||||
* @param dataSourceId The data source id to filter on or null if no filter.
|
||||
*/
|
||||
public AnalysisResultTypeFactory(Long dataSourceId) {
|
||||
this.dataSourceId = dataSourceId;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected TreeResultsDTO<? extends AnalysisResultSearchParam> getChildResults() throws IllegalArgumentException, ExecutionException {
|
||||
return MainDAO.getInstance().getAnalysisResultDAO().getAnalysisResultCounts(dataSourceId);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected TreeNode<AnalysisResultSearchParam> createNewNode(TreeResultsDTO.TreeItemDTO<? extends AnalysisResultSearchParam> rowData) {
|
||||
// if (SET_TREE_ARTIFACTS.contains(rowData.getTypeData().getArtifactType().getTypeID())) {
|
||||
// return new TreeTypeNode(rowData, new TreeSetFactory(rowData.getTypeData().getArtifactType(), dataSourceId, null));
|
||||
// } else if (BlackboardArtifact.Type.TSK_KEYWORD_HIT.equals(rowData.getTypeData().getArtifactType())) {
|
||||
// return new TreeTypeNode(rowData, new TreeSetFactory(rowData.getTypeData().getArtifactType(), dataSourceId, null));
|
||||
// } else {
|
||||
return new AnalysisResultTypeTreeNode(rowData);
|
||||
// }
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isRefreshRequired(PropertyChangeEvent evt) {
|
||||
String eventType = evt.getPropertyName();
|
||||
if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) {
|
||||
/**
|
||||
* This is a stop gap measure until a different way of handling the
|
||||
* closing of cases is worked out. Currently, remote events may be
|
||||
* received for a case that is already closed.
|
||||
*/
|
||||
try {
|
||||
Case.getCurrentCaseThrows();
|
||||
/**
|
||||
* Due to some unresolved issues with how cases are closed, it
|
||||
* is possible for the event to have a null oldValue if the
|
||||
* event is a remote event.
|
||||
*/
|
||||
final ModuleDataEvent event = (ModuleDataEvent) evt.getOldValue();
|
||||
if (null != event && Category.ANALYSIS_RESULT.equals(event.getBlackboardArtifactType().getCategory())
|
||||
&& !(AnalysisResultDAO.getIgnoredTreeTypes().contains(event.getBlackboardArtifactType()))) {
|
||||
return true;
|
||||
}
|
||||
} catch (NoCurrentCaseException notUsed) {
|
||||
/**
|
||||
* Case is closed, do nothing.
|
||||
*/
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* See if expected blackboard type matches event.
|
||||
*
|
||||
* @param expectedType The expected artifact type.
|
||||
* @param evt The event.
|
||||
*
|
||||
* @return If the event is a data added event and contains the provided
|
||||
* type.
|
||||
*/
|
||||
private static boolean isRefreshRequired(BlackboardArtifact.Type expectedType, PropertyChangeEvent evt) {
|
||||
String eventType = evt.getPropertyName();
|
||||
if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) {
|
||||
/**
|
||||
* This is a stop gap measure until a different way of handling the
|
||||
* closing of cases is worked out. Currently, remote events may be
|
||||
* received for a case that is already closed.
|
||||
*/
|
||||
try {
|
||||
Case.getCurrentCaseThrows();
|
||||
/**
|
||||
* Due to some unresolved issues with how cases are closed, it
|
||||
* is possible for the event to have a null oldValue if the
|
||||
* event is a remote event.
|
||||
*/
|
||||
final ModuleDataEvent event = (ModuleDataEvent) evt.getOldValue();
|
||||
// GVDTODO it may be necessary to have more fine-grained check for refresh here.
|
||||
if (null != event && expectedType.equals(event.getBlackboardArtifactType())) {
|
||||
return true;
|
||||
}
|
||||
} catch (NoCurrentCaseException notUsed) {
|
||||
/**
|
||||
* Case is closed, do nothing.
|
||||
*/
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Display name and count of an analysis result type in the tree.
|
||||
*/
|
||||
static class AnalysisResultTypeTreeNode extends TreeNode<AnalysisResultSearchParam> {
|
||||
|
||||
/**
|
||||
* Main constructor.
|
||||
*
|
||||
* @param itemData The data to display.
|
||||
*/
|
||||
public AnalysisResultTypeTreeNode(TreeResultsDTO.TreeItemDTO<? extends AnalysisResultSearchParam> itemData) {
|
||||
super(itemData.getTypeData().getArtifactType().getTypeName(),
|
||||
getIconPath(itemData.getTypeData().getArtifactType()),
|
||||
itemData);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void respondSelection(DataResultTopComponent dataResultPanel) {
|
||||
dataResultPanel.displayAnalysisResult(this.getItemData().getTypeData());
|
||||
}
|
||||
}
|
||||
|
||||
//
|
||||
// /**
|
||||
// * An analysis result type node that has nested children.
|
||||
// */
|
||||
// static class TreeTypeNode extends TreeNode<AnalysisResultSearchParam> {
|
||||
//
|
||||
// /**
|
||||
// * Main constructor.
|
||||
// *
|
||||
// * @param itemData The data to display.
|
||||
// */
|
||||
// public TreeTypeNode(TreeResultsDTO.TreeItemDTO<? extends AnalysisResultSearchParam> itemData, ChildFactory childFactory) {
|
||||
// super(itemData.getTypeData().getArtifactType().getTypeName(),
|
||||
// getIconPath(itemData.getTypeData().getArtifactType()),
|
||||
// itemData,
|
||||
// Children.create(childFactory, true),
|
||||
// getDefaultLookup(itemData));
|
||||
// }
|
||||
//
|
||||
// @Override
|
||||
// public void respondSelection(DataResultTopComponent dataResultPanel) {
|
||||
// // GVDTODO...NO OP???
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// /**
|
||||
// * Factory displaying all hashset sets with count in the tree.
|
||||
// */
|
||||
// static class TreeSetFactory extends TreeChildFactory<AnalysisResultSetSearchParam> {
|
||||
//
|
||||
// private final BlackboardArtifact.Type artifactType;
|
||||
// private final Long dataSourceId;
|
||||
// private final String nullSetName;
|
||||
//
|
||||
// /**
|
||||
// * Main constructor.
|
||||
// *
|
||||
// * @param artifactType The type of artifact.
|
||||
// * @param dataSourceId The data source object id for which the results
|
||||
// * should be filtered or null if no data source
|
||||
// * filtering.
|
||||
// * @param nullSetName The name of the set for artifacts with no
|
||||
// * TSK_SET_NAME value. If null, items are omitted.
|
||||
// */
|
||||
// public TreeSetFactory(BlackboardArtifact.Type artifactType, Long dataSourceId, String nullSetName) {
|
||||
// this.artifactType = artifactType;
|
||||
// this.dataSourceId = dataSourceId;
|
||||
// this.nullSetName = nullSetName;
|
||||
// }
|
||||
//
|
||||
// @Override
|
||||
// protected TreeResultsDTO<? extends AnalysisResultSetSearchParam> getChildResults() throws IllegalArgumentException, ExecutionException {
|
||||
// return MainDAO.getInstance().getAnalysisResultDAO().getSetCounts(this.artifactType, this.dataSourceId, this.nullSetName);
|
||||
// }
|
||||
//
|
||||
// @Override
|
||||
// public boolean isRefreshRequired(PropertyChangeEvent evt) {
|
||||
// return AnalysisResultTypeFactory.isRefreshRequired(artifactType, evt);
|
||||
// }
|
||||
//
|
||||
// @Override
|
||||
// protected TreeNode<AnalysisResultSetSearchParam> createNewNode(TreeResultsDTO.TreeItemDTO<? extends AnalysisResultSetSearchParam> rowData) {
|
||||
// return new TreeSetTypeNode(rowData, Children.LEAF);
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// /**
|
||||
// * A node for a set within an artifact type.
|
||||
// */
|
||||
// static class TreeSetTypeNode extends TreeNode<AnalysisResultSetSearchParam> {
|
||||
//
|
||||
// /**
|
||||
// * Main constructor.
|
||||
// *
|
||||
// * @param artifactType The type of artifact.
|
||||
// * @param itemData The data to display.
|
||||
// */
|
||||
// public TreeSetTypeNode(TreeResultsDTO.TreeItemDTO<? extends AnalysisResultSetSearchParam> itemData, Children children) {
|
||||
// super(itemData.getTypeData().getArtifactType().getTypeName(),
|
||||
// getIconPath(itemData.getTypeData().getArtifactType()),
|
||||
// itemData,
|
||||
// children,
|
||||
// getDefaultLookup(itemData));
|
||||
// }
|
||||
//
|
||||
// @Override
|
||||
// public void respondSelection(DataResultTopComponent dataResultPanel) {
|
||||
// dataResultPanel.displayAnalysisResultSet(this.getItemData().getTypeData());
|
||||
// }
|
||||
// }
|
||||
//
|
||||
//
|
||||
// @Messages({
|
||||
// "AnalysisResultTypeFactory_adHocName=Adhoc Results"
|
||||
// })
|
||||
// static class KeywordSetFactory extends TreeSetFactory {
|
||||
//
|
||||
// public KeywordSetFactory(Long dataSourceId) {
|
||||
// super(BlackboardArtifact.Type.TSK_KEYWORD_HIT, dataSourceId, Bundle.AnalysisResultTypeFactory_adHocName());
|
||||
// }
|
||||
//
|
||||
// @Override
|
||||
// protected TreeNode<AnalysisResultSetSearchParam> createNewNode(TreeResultsDTO.TreeItemDTO<? extends AnalysisResultSetSearchParam> rowData) {
|
||||
// return new TreeSetTypeNode(rowData, Children.LEAF);
|
||||
// }
|
||||
//
|
||||
//
|
||||
//
|
||||
// }
|
||||
//
|
||||
// public static class KeywordSearchTermParams {
|
||||
// private final String setName;
|
||||
// private final String searchTerm;
|
||||
// private final boolean hasChildren;
|
||||
// private final Long dataSourceId;
|
||||
//
|
||||
// public KeywordSearchTermParams(String setName, String searchTerm, boolean hasChildren, Long dataSourceId) {
|
||||
// this.setName = setName;
|
||||
// this.searchTerm = searchTerm;
|
||||
// this.hasChildren = hasChildren;
|
||||
// this.dataSourceId = dataSourceId;
|
||||
// }
|
||||
//
|
||||
// public String getSetName() {
|
||||
// return setName;
|
||||
// }
|
||||
//
|
||||
// public String getSearchTerm() {
|
||||
// return searchTerm;
|
||||
// }
|
||||
//
|
||||
// public boolean hasChildren() {
|
||||
// return hasChildren;
|
||||
// }
|
||||
//
|
||||
// public Long getDataSourceId() {
|
||||
// return dataSourceId;
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// static class KeywordSearchTermFactory extends TreeChildFactory<KeywordSearchTermParams> {
|
||||
// private final AnalysisResultSetSearchParam setParams;
|
||||
//
|
||||
// public KeywordSearchTermFactory(AnalysisResultSetSearchParam setParams) {
|
||||
// this.setParams = setParams;
|
||||
// }
|
||||
//
|
||||
//
|
||||
// @Override
|
||||
// protected TreeNode<KeywordSearchTermParams> createNewNode(TreeResultsDTO.TreeItemDTO<? extends KeywordSearchTermParams> rowData) {
|
||||
// return new KeywordSearchTermNode(rowData);
|
||||
// }
|
||||
//
|
||||
// @Override
|
||||
// protected TreeResultsDTO<? extends KeywordSearchTermParams> getChildResults() throws IllegalArgumentException, ExecutionException {
|
||||
// return MainDAO.getInstance().getAnalysisResultDAO().getKeywordSetCounts(this.setParams);
|
||||
// }
|
||||
//
|
||||
// @Override
|
||||
// public boolean isRefreshRequired(PropertyChangeEvent evt) {
|
||||
// return AnalysisResultTypeFactory.isRefreshRequired(BlackboardArtifact.Type.TSK_KEYWORD_HIT, evt);
|
||||
// }
|
||||
//
|
||||
// }
|
||||
//
|
||||
// static class KeywordSearchTermNode extends TreeNode<KeywordSearchTermParams> {
|
||||
//
|
||||
// public KeywordSearchTermNode(TreeResultsDTO.TreeItemDTO<? extends KeywordSearchTermParams> itemData) {
|
||||
// super(itemData.getTypeData().getSearchTerm(),
|
||||
// getIconPath(BlackboardArtifact.Type.TSK_KEYWORD_HIT),
|
||||
// itemData,
|
||||
// itemData.getTypeData().hasChildren() ? Children.create(new KeywordFoundMatchFactory(itemData), true) : Children.LEAF,
|
||||
// getDefaultLookup(itemData));
|
||||
// }
|
||||
//
|
||||
// @Override
|
||||
// public void respondSelection(DataResultTopComponent dataResultPanel) {
|
||||
// KeywordSearchTermParams searchParams = this.getItemData().getTypeData();
|
||||
//
|
||||
// if (!searchParams.hasChildren()) {
|
||||
// dataResultPanel.displayKeywordHits(new KeywordHitSearchParam(searchParams.getDataSourceId(), searchParams.getSetName(), null, searchParams.getSearchTerm()));
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// }
|
||||
// public static class KeywordFoundMatchFactory
|
||||
// public static class KeywordFoundMatchNode
|
||||
}
|
Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java
@ -0,0 +1,113 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2021 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.mainui.nodes;
|
||||
|
||||
import java.beans.PropertyChangeEvent;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent;
|
||||
import org.sleuthkit.autopsy.datamodel.utils.IconsUtil;
|
||||
import org.sleuthkit.autopsy.ingest.IngestManager;
|
||||
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
|
||||
import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactDAO;
|
||||
import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactSearchParam;
|
||||
import org.sleuthkit.autopsy.mainui.datamodel.MainDAO;
|
||||
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact.Category;
|
||||
|
||||
/**
|
||||
* Factory for displaying data artifact types in the tree.
|
||||
*/
|
||||
public class DataArtifactTypeFactory extends TreeChildFactory<DataArtifactSearchParam> {
|
||||
|
||||
private final Long dataSourceId;
|
||||
|
||||
/**
|
||||
* Main constructor.
|
||||
*
|
||||
* @param dataSourceId The data source id to filter on or null if no filter.
|
||||
*/
|
||||
public DataArtifactTypeFactory(Long dataSourceId) {
|
||||
this.dataSourceId = dataSourceId;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected TreeResultsDTO<? extends DataArtifactSearchParam> getChildResults() throws IllegalArgumentException, ExecutionException {
|
||||
return MainDAO.getInstance().getDataArtifactsDAO().getDataArtifactCounts(dataSourceId);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected TreeNode<DataArtifactSearchParam> createNewNode(TreeResultsDTO.TreeItemDTO<? extends DataArtifactSearchParam> rowData) {
|
||||
return new DataArtifactTypeTreeNode(rowData);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isRefreshRequired(PropertyChangeEvent evt) {
|
||||
String eventType = evt.getPropertyName();
|
||||
if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) {
|
||||
/**
|
||||
* This is a stop gap measure until a different way of handling the
|
||||
* closing of cases is worked out. Currently, remote events may be
|
||||
* received for a case that is already closed.
|
||||
*/
|
||||
try {
|
||||
Case.getCurrentCaseThrows();
|
||||
/**
|
||||
* Due to some unresolved issues with how cases are closed, it
|
||||
* is possible for the event to have a null oldValue if the
|
||||
* event is a remote event.
|
||||
*/
|
||||
final ModuleDataEvent event = (ModuleDataEvent) evt.getOldValue();
|
||||
if (null != event && Category.DATA_ARTIFACT.equals(event.getBlackboardArtifactType().getCategory())
|
||||
&& !(DataArtifactDAO.getIgnoredTreeTypes().contains(event.getBlackboardArtifactType()))) {
|
||||
return true;
|
||||
}
|
||||
} catch (NoCurrentCaseException notUsed) {
|
||||
/**
|
||||
* Case is closed, do nothing.
|
||||
*/
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Display name and count of a data artifact type in the tree.
|
||||
*/
|
||||
public static class DataArtifactTypeTreeNode extends TreeNode<DataArtifactSearchParam> {
|
||||
|
||||
private static String getIconPath(BlackboardArtifact.Type artType) {
|
||||
String iconPath = IconsUtil.getIconFilePath(artType.getTypeID());
|
||||
return iconPath != null && iconPath.charAt(0) == '/' ? iconPath.substring(1) : iconPath;
|
||||
}
|
||||
|
||||
public DataArtifactTypeTreeNode(TreeResultsDTO.TreeItemDTO<? extends DataArtifactSearchParam> itemData) {
|
||||
super(itemData.getTypeData().getArtifactType().getTypeName(),
|
||||
getIconPath(itemData.getTypeData().getArtifactType()),
|
||||
itemData);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void respondSelection(DataResultTopComponent dataResultPanel) {
|
||||
dataResultPanel.displayDataArtifact(this.getItemData().getTypeData());
|
||||
}
|
||||
}
|
||||
}
|
Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java
@ -0,0 +1,220 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2021 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.mainui.nodes;
|
||||
|
||||
import com.google.common.collect.MapMaker;
|
||||
import java.beans.PropertyChangeEvent;
|
||||
import java.beans.PropertyChangeListener;
|
||||
import java.util.ArrayList;
|
||||
import java.util.EnumSet;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.logging.Level;
|
||||
import java.util.stream.Collectors;
|
||||
import org.openide.nodes.ChildFactory;
|
||||
import org.openide.nodes.Node;
|
||||
import org.openide.util.WeakListeners;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.guiutils.RefreshThrottler;
|
||||
import org.sleuthkit.autopsy.guiutils.RefreshThrottler.Refresher;
|
||||
import org.sleuthkit.autopsy.ingest.IngestManager;
|
||||
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO;
|
||||
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO;
|
||||
|
||||
/**
|
||||
* Factory for populating tree with results.
|
||||
*/
|
||||
public abstract class TreeChildFactory<T> extends ChildFactory.Detachable<Object> implements Refresher {
|
||||
|
||||
private static final Logger logger = Logger.getLogger(TreeChildFactory.class.getName());
|
||||
|
||||
private static final Set<IngestManager.IngestJobEvent> INGEST_JOB_EVENTS_OF_INTEREST
|
||||
= EnumSet.of(IngestManager.IngestJobEvent.COMPLETED, IngestManager.IngestJobEvent.CANCELLED);
|
||||
|
||||
private final RefreshThrottler refreshThrottler = new RefreshThrottler(this);
|
||||
|
||||
private final PropertyChangeListener pcl = (PropertyChangeEvent evt) -> {
|
||||
String eventType = evt.getPropertyName();
|
||||
if (eventType.equals(Case.Events.CURRENT_CASE.toString())) {
|
||||
// case was closed. Remove listeners so that we don't get called with a stale case handle
|
||||
if (evt.getNewValue() == null) {
|
||||
removeNotify();
|
||||
}
|
||||
} else if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString())
|
||||
|| eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) {
|
||||
/**
|
||||
* This is a stop gap measure until a different way of handling the
|
||||
* closing of cases is worked out. Currently, remote events may be
|
||||
* received for a case that is already closed.
|
||||
*/
|
||||
try {
|
||||
Case.getCurrentCaseThrows();
|
||||
refresh(false);
|
||||
} catch (NoCurrentCaseException notUsed) {
|
||||
/**
|
||||
* Case is closed, do nothing.
|
||||
*/
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
private final PropertyChangeListener weakPcl = WeakListeners.propertyChange(pcl, null);
|
||||
|
||||
private final Map<Object, TreeNode<T>> typeNodeMap = new MapMaker().weakValues().makeMap();
|
||||
private TreeResultsDTO<? extends T> curResults = null;
|
||||
private Map<Object, TreeItemDTO<? extends T>> idMapping = new HashMap<>();
|
||||
|
||||
@Override
|
||||
protected boolean createKeys(List<Object> toPopulate) {
|
||||
if (curResults == null) {
|
||||
try {
|
||||
updateData();
|
||||
} catch (IllegalArgumentException | ExecutionException ex) {
|
||||
logger.log(Level.WARNING, "An error occurred while fetching keys", ex);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// update existing cached nodes
|
||||
List<Object> curResultIds = new ArrayList<>();
|
||||
for (TreeItemDTO<? extends T> dto : curResults.getItems()) {
|
||||
TreeNode<T> currentlyCached = typeNodeMap.get(dto.getId());
|
||||
if (currentlyCached != null) {
|
||||
currentlyCached.update(dto);
|
||||
}
|
||||
curResultIds.add(dto.getId());
|
||||
}
|
||||
|
||||
toPopulate.addAll(curResultIds);
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Node createNodeForKey(Object treeItemId) {
|
||||
return typeNodeMap.computeIfAbsent(treeItemId, (id) -> {
|
||||
TreeItemDTO<? extends T> itemData = idMapping.get(id);
|
||||
// create new node if data for node exists. otherwise, return null.
|
||||
return itemData == null
|
||||
? null
|
||||
: createNewNode(itemData);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates local data by fetching data from the DAO's.
|
||||
*
|
||||
* @throws IllegalArgumentException
|
||||
* @throws ExecutionException
|
||||
*/
|
||||
protected void updateData() throws IllegalArgumentException, ExecutionException {
|
||||
this.curResults = getChildResults();
|
||||
this.idMapping = curResults.getItems().stream()
|
||||
.collect(Collectors.toMap(item -> item.getId(), item -> item, (item1, item2) -> item1));
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void refresh() {
|
||||
update();
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetches child view from the database and updates the tree.
|
||||
*/
|
||||
public void update() {
|
||||
try {
|
||||
updateData();
|
||||
} catch (IllegalArgumentException | ExecutionException ex) {
|
||||
logger.log(Level.WARNING, "An error occurred while fetching keys", ex);
|
||||
return;
|
||||
}
|
||||
this.refresh(false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Dispose resources associated with this factory.
|
||||
*/
|
||||
private void disposeResources() {
|
||||
curResults = null;
|
||||
typeNodeMap.clear();
|
||||
idMapping.clear();
|
||||
}
|
||||
|
||||
/**
|
||||
* Register listeners for autopsy events.
|
||||
*/
|
||||
private void registerListeners() {
|
||||
refreshThrottler.registerForIngestModuleEvents();
|
||||
IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, weakPcl);
|
||||
Case.addEventTypeSubscriber(EnumSet.of(Case.Events.CURRENT_CASE), weakPcl);
|
||||
}
|
||||
|
||||
/**
|
||||
* Unregister listeners for autopsy events.
|
||||
*/
|
||||
private void unregisterListeners() {
|
||||
refreshThrottler.unregisterEventListener();
|
||||
IngestManager.getInstance().removeIngestJobEventListener(weakPcl);
|
||||
Case.removeEventTypeSubscriber(EnumSet.of(Case.Events.CURRENT_CASE), weakPcl);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void removeNotify() {
|
||||
disposeResources();
|
||||
unregisterListeners();
|
||||
super.removeNotify();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void finalize() throws Throwable {
|
||||
disposeResources();
|
||||
unregisterListeners();
|
||||
super.finalize();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void addNotify() {
|
||||
registerListeners();
|
||||
super.addNotify();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a TreeNode given the tree item data.
|
||||
*
|
||||
* @param rowData The tree item data.
|
||||
*
|
||||
* @return The generated tree node.
|
||||
*/
|
||||
protected abstract TreeNode<T> createNewNode(TreeItemDTO<? extends T> rowData);
|
||||
|
||||
/**
|
||||
* Fetches data from the database to populate this part of the tree.
|
||||
*
|
||||
* @return The data.
|
||||
*
|
||||
* @throws IllegalArgumentException
|
||||
* @throws ExecutionException
|
||||
*/
|
||||
protected abstract TreeResultsDTO<? extends T> getChildResults() throws IllegalArgumentException, ExecutionException;
|
||||
}
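A minimal concrete factory, to illustrate the contract (sketch only, not part of this patch; "StaticExampleFactory" and its fixed row are hypothetical, and it assumes an import for DataResultTopComponent as in the factories above):

    static class StaticExampleFactory extends TreeChildFactory<String> {

        @Override
        protected TreeResultsDTO<? extends String> getChildResults() {
            // a real factory queries its DAO; here a single fixed row with a count of 3
            return new TreeResultsDTO<>(java.util.Arrays.asList(
                    new TreeItemDTO<>("EXAMPLE_TYPE", "payload", "row-1", "Example Row", 3L)));
        }

        @Override
        protected TreeNode<String> createNewNode(TreeItemDTO<? extends String> rowData) {
            // a leaf node whose display name renders as "Example Row (3)"; selection is a no-op
            return new TreeNode<String>(rowData.getDisplayName(), null, rowData) {
                @Override
                public void respondSelection(DataResultTopComponent dataResultPanel) {
                }
            };
        }

        @Override
        public boolean isRefreshRequired(PropertyChangeEvent evt) {
            return false; // static data never changes
        }
    }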
|
Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeNode.java
@ -0,0 +1,132 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2021 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.mainui.nodes;
|
||||
|
||||
import java.text.MessageFormat;
|
||||
import java.util.Objects;
import java.util.logging.Level;
|
||||
import org.openide.nodes.AbstractNode;
|
||||
import org.openide.nodes.Children;
|
||||
import org.openide.util.Lookup;
|
||||
import org.openide.util.lookup.Lookups;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO;
|
||||
|
||||
/**
|
||||
* A node to be displayed in the tree that shows the count.
|
||||
*/
|
||||
public abstract class TreeNode<T> extends AbstractNode implements SelectionResponder {
|
||||
|
||||
private static final Logger logger = Logger.getLogger(TreeNode.class.getName());
|
||||
|
||||
/**
|
||||
* Returns the default lookup based on the item dto.
|
||||
*
|
||||
* @param itemData The item dto data.
|
||||
*
|
||||
* @return The lookup to use in the node.
|
||||
*/
|
||||
protected static <T> Lookup getDefaultLookup(TreeItemDTO<? extends T> itemData) {
|
||||
return Lookups.fixed(itemData, itemData.getTypeData());
|
||||
}
|
||||
|
||||
private TreeItemDTO<? extends T> itemData;
|
||||
|
||||
/**
|
||||
* Main constructor assuming a leaf node with default lookup.
|
||||
*
|
||||
* @param nodeName The name of the node.
|
||||
* @param icon The path of the icon or null.
|
||||
* @param itemData The data to back the node.
|
||||
*/
|
||||
protected TreeNode(String nodeName, String icon, TreeItemDTO<? extends T> itemData) {
|
||||
this(nodeName, icon, itemData, Children.LEAF, getDefaultLookup(itemData));
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor.
|
||||
*
|
||||
* @param nodeName The name of the node.
|
||||
* @param icon The path of the icon or null.
|
||||
* @param itemData The data to back the node. Must be non-null.
|
||||
* @param children The children of this node.
|
||||
* @param lookup The lookup for this node.
|
||||
*/
|
||||
protected TreeNode(String nodeName, String icon, TreeItemDTO<? extends T> itemData, Children children, Lookup lookup) {
|
||||
super(children, lookup);
|
||||
setName(nodeName);
|
||||
if (icon != null) {
|
||||
setIconBaseWithExtension(icon);
|
||||
}
|
||||
update(itemData);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return The current backing item data.
|
||||
*/
|
||||
protected TreeItemDTO<? extends T> getItemData() {
|
||||
return itemData;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the display name of the node to include the display name and count
|
||||
* of the item.
|
||||
*
|
||||
* @param prevData The previous item data (may be null).
|
||||
* @param curData The item data (must be non-null).
|
||||
*/
|
||||
protected void updateDisplayName(TreeItemDTO<? extends T> prevData, TreeItemDTO<? extends T> curData) {
|
||||
// update display name only if there is a change.
|
||||
if (prevData == null
|
||||
|| !prevData.getDisplayName().equals(curData.getDisplayName())
|
||||
|| !Objects.equals(prevData.getCount(), curData.getCount())) {
|
||||
String displayName = curData.getCount() == null
|
||||
? curData.getDisplayName()
|
||||
: MessageFormat.format("{0} ({1})", curData.getDisplayName(), curData.getCount());
|
||||
|
||||
this.setDisplayName(displayName);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the backing data of this node.
|
||||
*
|
||||
* @param updatedData The updated data. Must be non-null.
|
||||
*
|
||||
* @throws IllegalArgumentException
|
||||
*/
|
||||
public void update(TreeItemDTO<? extends T> updatedData) {
|
||||
if (updatedData == null) {
|
||||
logger.log(Level.WARNING, "Expected non-null updatedData");
return; // nothing to apply; avoid clearing the existing item data
|
||||
} else if (this.itemData != null && !Objects.equals(this.itemData.getId(), updatedData.getId())) {
|
||||
logger.log(Level.WARNING, MessageFormat.format(
|
||||
"Expected update data to have same id but received [id: {0}] replacing [id: {1}]",
|
||||
updatedData.getId(),
|
||||
this.itemData.getId()));
|
||||
return;
|
||||
}
|
||||
|
||||
TreeItemDTO<? extends T> prevData = this.itemData;
|
||||
this.itemData = updatedData;
|
||||
updateDisplayName(prevData, updatedData);
|
||||
}
|
||||
}
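For reference (not part of this patch), the count formatting above produces, for example:

    // MessageFormat.format("{0} ({1})", "Web Bookmarks", 125L)  ->  "Web Bookmarks (125)"
    // a null count leaves the plain display name: "Web Bookmarks"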
|
@ -32,6 +32,7 @@ import org.netbeans.junit.NbModuleSuite;
|
||||
import org.netbeans.junit.NbTestCase;
|
||||
import org.openide.util.Exceptions;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.services.TagsManager;
|
||||
import org.sleuthkit.autopsy.testutils.CaseUtils;
|
||||
import org.sleuthkit.autopsy.testutils.TestUtilsException;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
@ -51,6 +52,7 @@ import org.sleuthkit.datamodel.Pool;
|
||||
import org.sleuthkit.datamodel.Image;
|
||||
import org.sleuthkit.datamodel.Score;
|
||||
import org.sleuthkit.datamodel.SleuthkitCase;
|
||||
import org.sleuthkit.datamodel.TagName;
|
||||
import org.sleuthkit.datamodel.TskData;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.datamodel.Volume;
|
||||
@ -81,6 +83,8 @@ public class TableSearchTest extends NbTestCase {
|
||||
private static final String ARTIFACT_CONFIGURATION = "Test configuration";
|
||||
private static final String ARTIFACT_JUSTIFICATION = "Test justification";
|
||||
private static final Score ARTIFACT_SCORE = Score.SCORE_LIKELY_NOTABLE;
|
||||
private static final long ARTIFACT_COUNT_WEB_BOOKMARK = 125;
|
||||
private static final long ARTIFACT_COUNT_YARA = 150;
|
||||
|
||||
// Values for the hash set hit tests
|
||||
private static final String HASH_SET_1 = "Hash Set 1";
|
||||
@ -94,12 +98,22 @@ public class TableSearchTest extends NbTestCase {
|
||||
private static final String KEYWORD_PREVIEW = "There is a bomb.";
|
||||
|
||||
// Extension and MIME type test
|
||||
private static AbstractFile customFile;
|
||||
private static final String CUSTOM_MIME_TYPE = "fake/type";
|
||||
private static final String CUSTOM_MIME_TYPE_FILE_NAME = "test.fake";
|
||||
private static final String CUSTOM_EXTENSION = "fake";
|
||||
private static final Set<String> CUSTOM_EXTENSIONS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList("." + CUSTOM_EXTENSION))); //NON-NLS
|
||||
private static final Set<String> EMPTY_RESULT_SET_EXTENSIONS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(".blah", ".blah2", ".crazy"))); //NON-NLS
|
||||
|
||||
// Tag test
|
||||
private static final String TAG_COMMENT = "Tag comment";
|
||||
private static final String TAG_DESCRIPTION = "Tag description";
|
||||
private static final String MD5_COLUMN = "MD5 Hash";
|
||||
private static final String FILE_PATH_COLUMN = "File Path";
|
||||
private static final String MODIFIED_TIME_COLUMN = "Modified Time";
|
||||
private static final String SOURCE_NAME_COLUMN = "Source Name";
|
||||
private static final String SOURCE_FILE_PATH_COLUMN = "Source File Path";
|
||||
|
||||
/////////////////////////////////////////////////
|
||||
// Data to be used across the test methods.
|
||||
// These are initialized in setUpCaseDatabase().
|
||||
@ -107,6 +121,7 @@ public class TableSearchTest extends NbTestCase {
Case openCase = null; // The case for testing
SleuthkitCase db = null; // The case database
Blackboard blackboard = null; // The blackboard
TagsManager tagsManager = null;// Tags manager

DataSource dataSource1 = null; // A local files data source
DataSource dataSource2 = null; // A local files data source
@ -146,6 +161,10 @@ public class TableSearchTest extends NbTestCase {
Volume fsTestVolumeB1 = null; // Another volume
Pool fsTestPoolB = null; // A pool

// Tags test
TagName knownTag1 = null;
TagName tag2 = null;

public static Test suite() {
NbModuleSuite.Configuration conf = NbModuleSuite.createConfiguration(TableSearchTest.class).
clusters(".*").
@ -171,6 +190,7 @@ public class TableSearchTest extends NbTestCase {
extensionSearchTest();
sizeSearchTest();
fileSystemTest();
tagsTest();
}

/**
@ -183,6 +203,7 @@ public class TableSearchTest extends NbTestCase {
openCase = CaseUtils.createAsCurrentCase("testTableSearchCase");
db = openCase.getSleuthkitCase();
blackboard = db.getBlackboard();
tagsManager = openCase.getServices().getTagsManager();

// Add two logical files data sources
trans = db.beginTransaction();
@ -214,7 +235,7 @@ public class TableSearchTest extends NbTestCase {
fileB1.setMIMEType("text/plain");
fileB1.save();

AbstractFile customFile = db.addLocalFile(CUSTOM_MIME_TYPE_FILE_NAME, "", 67000000, 0, 0, 0, 0, true, TskData.EncodingType.NONE, folderB1);
customFile = db.addLocalFile(CUSTOM_MIME_TYPE_FILE_NAME, "", 67000000, 0, 0, 0, 0, true, TskData.EncodingType.NONE, folderB1);
customFile.setMIMEType(CUSTOM_MIME_TYPE);
customFile.save();

@ -251,6 +272,13 @@ public class TableSearchTest extends NbTestCase {
customDataArtifactSourceFile = fileA3;
customDataArtifactLinkedFile = fileA2;

// Add a lot of web bookmark data artifacts
for (int i = 0;i < ARTIFACT_COUNT_WEB_BOOKMARK;i++) {
attrs.clear();
attrs.add(new BlackboardAttribute(BlackboardAttribute.Type.TSK_COMMENT, MODULE_NAME, Integer.toString(i)));
fileA1.newDataArtifact(BlackboardArtifact.Type.TSK_WEB_BOOKMARK, attrs);
}

// Add analysis results
// Data source 1: Encryption detected (2), custom type
// Data source 2: Encryption detected
@ -275,6 +303,13 @@ public class TableSearchTest extends NbTestCase {
customAnalysisResult = customDataArtifact.newAnalysisResult(customAnalysisResultType, ARTIFACT_SCORE, ARTIFACT_CONCLUSION, ARTIFACT_CONFIGURATION, ARTIFACT_JUSTIFICATION, attrs).getAnalysisResult();
customAnalysisResultSource = customDataArtifact;

// Add a lot of YARA hit analysis results
for (int i = 0;i < ARTIFACT_COUNT_YARA;i++) {
attrs.clear();
attrs.add(new BlackboardAttribute(BlackboardAttribute.Type.TSK_COMMENT, MODULE_NAME, Integer.toString(i)));
fileA1.newAnalysisResult(BlackboardArtifact.Type.TSK_YARA_HIT, Score.SCORE_NOTABLE, "conclusion", "configuration", "justification", attrs);
}

// Add hash hits
attrs.clear();
attrs.add(new BlackboardAttribute(BlackboardAttribute.Type.TSK_SET_NAME, MODULE_NAME, HASH_SET_1));
@ -400,12 +435,31 @@ public class TableSearchTest extends NbTestCase {

trans.commit();
trans = null;
} catch (TestUtilsException | TskCoreException | BlackboardException ex) {

// Add tags ----
knownTag1 = tagsManager.addTagName("Tag 1", TAG_DESCRIPTION, TagName.HTML_COLOR.RED, TskData.FileKnown.KNOWN);
tag2 = tagsManager.addTagName("Tag 2", "Description");

// Tag the custom artifacts in data source 1
openCase.getServices().getTagsManager().addBlackboardArtifactTag(customDataArtifact, knownTag1, TAG_COMMENT);
openCase.getServices().getTagsManager().addBlackboardArtifactTag(customAnalysisResult, tag2, "Comment 2");

// Tag file in data source 1
openCase.getServices().getTagsManager().addContentTag(fileA2, tag2);
openCase.getServices().getTagsManager().addContentTag(fileA3, tag2);

// Tag file in data source 2
openCase.getServices().getTagsManager().addContentTag(fileB1, tag2);

// Tag the custom file in data source 2
openCase.getServices().getTagsManager().addContentTag(customFile, knownTag1);

} catch (TestUtilsException | TskCoreException | BlackboardException | TagsManager.TagNameAlreadyExistsException ex) {
if (trans != null) {
try {
trans.rollback();
} catch (TskCoreException ex2) {
ex2.printStackTrace();
Exceptions.printStackTrace(ex2);
}
}
Exceptions.printStackTrace(ex);
@ -474,6 +528,41 @@ public class TableSearchTest extends NbTestCase {
assertTrue(dataArtifactRowDTO.getCellValues().contains(ARTIFACT_INT));
assertTrue(dataArtifactRowDTO.getCellValues().contains(ARTIFACT_DOUBLE));

// Test paging
Long pageSize = new Long(100);
assertTrue(ARTIFACT_COUNT_WEB_BOOKMARK > pageSize);

// Get the first page
param = new DataArtifactSearchParam(BlackboardArtifact.Type.TSK_WEB_BOOKMARK, null);
results = dataArtifactDAO.getDataArtifactsForTable(param, 0, pageSize, false);
assertEquals(ARTIFACT_COUNT_WEB_BOOKMARK, results.getTotalResultsCount());
assertEquals(pageSize.longValue(), results.getItems().size());

// Save all artifact IDs from the first page
Set<Long> firstPageObjIds = new HashSet<>();
for (RowDTO row : results.getItems()) {
assertTrue(row instanceof DataArtifactRowDTO);
DataArtifactRowDTO dataRow = (DataArtifactRowDTO) row;
assertTrue(dataRow.getDataArtifact() != null);
firstPageObjIds.add(dataRow.getDataArtifact().getId());
}
assertEquals(pageSize.longValue(), firstPageObjIds.size());

// Get the second page
param = new DataArtifactSearchParam(BlackboardArtifact.Type.TSK_WEB_BOOKMARK, null);
results = dataArtifactDAO.getDataArtifactsForTable(param, pageSize, pageSize, false);
assertEquals(ARTIFACT_COUNT_WEB_BOOKMARK, results.getTotalResultsCount());
assertEquals(ARTIFACT_COUNT_WEB_BOOKMARK - pageSize, results.getItems().size());

// Make sure no artifacts from the second page appeared on the first
for (RowDTO row : results.getItems()) {
assertTrue(row instanceof DataArtifactRowDTO);
DataArtifactRowDTO dataRow = (DataArtifactRowDTO) row;
assertTrue(dataRow.getDataArtifact() != null);
assertFalse("Data artifact ID: " + dataRow.getDataArtifact().getId() + " appeared on both page 1 and page 2",
firstPageObjIds.contains(dataRow.getDataArtifact().getId()));
}

} catch (ExecutionException ex) {
Exceptions.printStackTrace(ex);
Assert.fail(ex.getMessage());
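
The paging assertions above encode a simple invariant: a full first page, the remainder of the results on the second page, and no artifact ID shared between the two pages. The following standalone sketch shows the same arithmetic with plain collections instead of DataArtifactDAO; the class name and the item count of 125 are illustrative stand-ins and are not part of this commit.

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class PagingInvariantSketch {

    // Returns the (startItem, maxCount) window over the full result list.
    static List<Long> page(List<Long> all, int startItem, int maxCount) {
        int from = Math.min(startItem, all.size());
        int to = Math.min(startItem + maxCount, all.size());
        return all.subList(from, to);
    }

    public static void main(String[] args) {
        int total = 125;     // stands in for ARTIFACT_COUNT_WEB_BOOKMARK
        int pageSize = 100;  // same page size the test uses

        List<Long> ids = new ArrayList<>();
        for (long i = 0; i < total; i++) {
            ids.add(i);
        }

        List<Long> firstPage = page(ids, 0, pageSize);
        List<Long> secondPage = page(ids, pageSize, pageSize);

        // Same checks the test performs: a full first page, the remainder on the
        // second page, and no ID present on both pages.
        if (firstPage.size() != pageSize) {
            throw new AssertionError("unexpected first page size");
        }
        if (secondPage.size() != total - pageSize) {
            throw new AssertionError("unexpected second page size");
        }
        Set<Long> firstPageIds = new HashSet<>(firstPage);
        for (Long id : secondPage) {
            if (firstPageIds.contains(id)) {
                throw new AssertionError("ID " + id + " appeared on both pages");
            }
        }
        System.out.println("Paging invariant holds: " + total + " items, page size " + pageSize);
    }
}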
@ -577,6 +666,87 @@ public class TableSearchTest extends NbTestCase {
}
}

public void tagsTest() {
// Quick test that everything is initialized
assertTrue(db != null);

try {
TagsDAO tagsDAO = MainDAO.getInstance().getTagsDAO();

// Get "Tag1" file tags from data source 1
TagsSearchParams param = new TagsSearchParams(knownTag1, TagsSearchParams.TagType.FILE, dataSource1.getId());
SearchResultsDTO results = tagsDAO.getTags(param, 0, null, false);
assertEquals(0, results.getTotalResultsCount());
assertEquals(0, results.getItems().size());

// Get "Tag2" file tags from data source 1
param = new TagsSearchParams(tag2, TagsSearchParams.TagType.FILE, dataSource1.getId());
results = tagsDAO.getTags(param, 0, null, false);
assertEquals(2, results.getTotalResultsCount());
assertEquals(2, results.getItems().size());

// Get "Tag2" file tags from all data sources
param = new TagsSearchParams(tag2, TagsSearchParams.TagType.FILE, null);
results = tagsDAO.getTags(param, 0, null, false);
assertEquals(3, results.getTotalResultsCount());
assertEquals(3, results.getItems().size());

// Get "Tag1" file tags from data source 2
param = new TagsSearchParams(knownTag1, TagsSearchParams.TagType.FILE, dataSource2.getId());
results = tagsDAO.getTags(param, 0, null, false);
assertEquals(1, results.getTotalResultsCount());
assertEquals(1, results.getItems().size());

// Get the row
RowDTO rowDTO = results.getItems().get(0);
assertTrue(rowDTO instanceof BaseRowDTO);
BaseRowDTO tagResultRowDTO = (BaseRowDTO) rowDTO;

// Check that the file tag is for the custom file
assertTrue(tagResultRowDTO.getCellValues().contains(customFile.getName()));

// Check that a few of the expected file tag column names are present
List<String> columnDisplayNames = results.getColumns().stream().map(p -> p.getDisplayName()).collect(Collectors.toList());
assertTrue(columnDisplayNames.contains(MD5_COLUMN));
assertTrue(columnDisplayNames.contains(FILE_PATH_COLUMN));
assertTrue(columnDisplayNames.contains(MODIFIED_TIME_COLUMN));

// Check that the result tag columns are not present
assertFalse(columnDisplayNames.contains(SOURCE_NAME_COLUMN));
assertFalse(columnDisplayNames.contains(SOURCE_FILE_PATH_COLUMN));

// Get "Tag1" result tags from data source 2
param = new TagsSearchParams(knownTag1, TagsSearchParams.TagType.RESULT, dataSource2.getId());
results = tagsDAO.getTags(param, 0, null, false);
assertEquals(0, results.getTotalResultsCount());
assertEquals(0, results.getItems().size());

// Get "Tag2" result tags from data source 1
param = new TagsSearchParams(tag2, TagsSearchParams.TagType.RESULT, dataSource1.getId());
results = tagsDAO.getTags(param, 0, null, false);
assertEquals(1, results.getTotalResultsCount());
assertEquals(1, results.getItems().size());

// Get "Tag1" result tags from data source 1
param = new TagsSearchParams(knownTag1, TagsSearchParams.TagType.RESULT, dataSource1.getId());
results = tagsDAO.getTags(param, 0, null, false);
assertEquals(1, results.getTotalResultsCount());
assertEquals(1, results.getItems().size());

// Get the row
rowDTO = results.getItems().get(0);
assertTrue(rowDTO instanceof BaseRowDTO);
tagResultRowDTO = (BaseRowDTO) rowDTO;

// Check that some of the expected result tag column values are present
assertTrue(tagResultRowDTO.getCellValues().contains(TAG_COMMENT));

} catch (ExecutionException ex) {
Exceptions.printStackTrace(ex);
Assert.fail(ex.getMessage());
}
}
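
The tagsTest() method above filters along the three dimensions of TagsSearchParams: the tag name, whether the tag is on a file or an artifact result, and an optional data source. A small self-contained sketch of that filtering logic follows, producing the same counts the test expects from the tags created in setup; the TagRow class and the numeric data source IDs are illustrative stand-ins, not Autopsy types.

import java.util.Arrays;
import java.util.List;
import java.util.Objects;

public class TagFilterSketch {

    enum TagType { FILE, RESULT }

    // Minimal stand-in for one tag row: tag display name, file vs. result tag, owning data source.
    static final class TagRow {
        final String tagName;
        final TagType type;
        final long dataSourceId;

        TagRow(String tagName, TagType type, long dataSourceId) {
            this.tagName = tagName;
            this.type = type;
            this.dataSourceId = dataSourceId;
        }
    }

    // Mirrors the three TagsSearchParams dimensions: tag name, tag type, optional data source filter.
    static long count(List<TagRow> rows, String tagName, TagType type, Long dataSourceId) {
        return rows.stream()
                .filter(r -> r.tagName.equals(tagName))
                .filter(r -> r.type == type)
                .filter(r -> dataSourceId == null || Objects.equals(r.dataSourceId, dataSourceId))
                .count();
    }

    public static void main(String[] args) {
        // Roughly the shape of data the setup code creates: "Tag 2" on two files in data
        // source 1 and one file in data source 2, "Tag 1" on the custom file in data
        // source 2, plus one artifact tag of each name in data source 1.
        List<TagRow> rows = Arrays.asList(
                new TagRow("Tag 2", TagType.FILE, 1),
                new TagRow("Tag 2", TagType.FILE, 1),
                new TagRow("Tag 2", TagType.FILE, 2),
                new TagRow("Tag 1", TagType.FILE, 2),
                new TagRow("Tag 1", TagType.RESULT, 1),
                new TagRow("Tag 2", TagType.RESULT, 1));

        System.out.println(count(rows, "Tag 1", TagType.FILE, 1L));   // 0
        System.out.println(count(rows, "Tag 2", TagType.FILE, 1L));   // 2
        System.out.println(count(rows, "Tag 2", TagType.FILE, null)); // 3 (all data sources)
        System.out.println(count(rows, "Tag 1", TagType.FILE, 2L));   // 1
        System.out.println(count(rows, "Tag 1", TagType.RESULT, 2L)); // 0
        System.out.println(count(rows, "Tag 2", TagType.RESULT, 1L)); // 1
        System.out.println(count(rows, "Tag 1", TagType.RESULT, 1L)); // 1
    }
}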

public void analysisResultSearchTest() {
// Quick test that everything is initialized
assertTrue(db != null);
@ -629,6 +799,41 @@ public class TableSearchTest extends NbTestCase {
assertTrue(analysisResultRowDTO.getCellValues().contains(ARTIFACT_CONFIGURATION));
assertTrue(analysisResultRowDTO.getCellValues().contains(ARTIFACT_CONCLUSION));

// Test paging
Long pageSize = new Long(100);
assertTrue(ARTIFACT_COUNT_YARA > pageSize);

// Get the first page
param = new AnalysisResultSearchParam(BlackboardArtifact.Type.TSK_YARA_HIT, null);
results = analysisResultDAO.getAnalysisResultsForTable(param, 0, pageSize, false);
assertEquals(ARTIFACT_COUNT_YARA, results.getTotalResultsCount());
assertEquals(pageSize.longValue(), results.getItems().size());

// Save all artifact IDs from the first page
Set<Long> firstPageObjIds = new HashSet<>();
for (RowDTO row : results.getItems()) {
assertTrue(row instanceof AnalysisResultRowDTO);
AnalysisResultRowDTO analysisRow = (AnalysisResultRowDTO) row;
assertTrue(analysisRow.getAnalysisResult() != null);
firstPageObjIds.add(analysisRow.getAnalysisResult().getId());
}
assertEquals(pageSize.longValue(), firstPageObjIds.size());

// Get the second page
param = new AnalysisResultSearchParam(BlackboardArtifact.Type.TSK_YARA_HIT, null);
results = analysisResultDAO.getAnalysisResultsForTable(param, pageSize, pageSize, false);
assertEquals(ARTIFACT_COUNT_YARA, results.getTotalResultsCount());
assertEquals(ARTIFACT_COUNT_YARA - pageSize, results.getItems().size());

// Make sure no artifacts from the second page appeared on the first
for (RowDTO row : results.getItems()) {
assertTrue(row instanceof AnalysisResultRowDTO);
AnalysisResultRowDTO analysisRow = (AnalysisResultRowDTO) row;
assertTrue(analysisRow.getAnalysisResult() != null);
assertFalse("Analysis result ID: " + analysisRow.getAnalysisResult().getId() + " appeared on both page 1 and page 2",
firstPageObjIds.contains(analysisRow.getAnalysisResult().getId()));
}

} catch (ExecutionException ex) {
Exceptions.printStackTrace(ex);
Assert.fail(ex.getMessage());
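
The analysis result paging checks repeat the pattern used for data artifacts: the expected number of rows on a page is the smaller of the page size and the remaining result count. A brief sketch of that calculation follows; the class name and the total of 110 are illustrative assumptions, since ARTIFACT_COUNT_YARA's actual value is defined elsewhere in the test.

public class PageSizeArithmeticSketch {

    // Expected number of rows on page 'pageIndex' (0-based) when 'total' results
    // are returned in windows of 'pageSize' items.
    static long expectedItemsOnPage(long total, long pageSize, long pageIndex) {
        long startItem = pageIndex * pageSize;
        return Math.max(0, Math.min(pageSize, total - startItem));
    }

    public static void main(String[] args) {
        long total = 110;    // stands in for ARTIFACT_COUNT_YARA; must exceed pageSize
        long pageSize = 100; // same page size the test uses

        // Matches the two assertions above: a full first page, the remainder on the second.
        System.out.println(expectedItemsOnPage(total, pageSize, 0)); // 100
        System.out.println(expectedItemsOnPage(total, pageSize, 1)); // 10 (= total - pageSize)
        System.out.println(expectedItemsOnPage(total, pageSize, 2)); // 0
    }
}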
@ -894,5 +1099,7 @@ public class TableSearchTest extends NbTestCase {
}
openCase = null;
db = null;
blackboard = null;
tagsManager = null;
}
}