Merge pull request #7205 from wschaeferB/7877-ImprovedDiscoverySearchCancellation

7877 improved discovery search cancellation
This commit is contained in:
Richard Cordovano 2021-08-26 16:45:35 -04:00 committed by GitHub
commit daf22f6d5c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
18 changed files with 530 additions and 162 deletions

View File

@ -1,7 +1,7 @@
/*
* Autopsy
*
* Copyright 2020 Basis Technology Corp.
* Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -54,14 +54,18 @@ public abstract class AbstractFilter {
* @param caseDb The case database
* @param centralRepoDb The central repo database. Can be null if the
* filter does not require it.
* @param context The SearchContext the search which is applying this
* filter is being performed from.
*
* @return The list of results that match this filter (and any that came
* before it)
*
* @throws DiscoveryException
* @throws SearchCancellationException Thrown when the user has cancelled
* the search.
*/
public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException {
CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
return new ArrayList<>();
}

View File

@ -1,7 +1,7 @@
/*
* Autopsy
*
* Copyright 2020 Basis Technology Corp.
* Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -78,10 +78,14 @@ public class DiscoveryAttributes {
* @param caseDb The case database.
* @param centralRepoDb The central repository database. Can be null if
* not needed.
* @param context The SearchContext the search which is applying
* this filter is being performed from.
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has
* cancelled the search.
*/
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException {
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Default is to do nothing
}
}
@ -154,10 +158,13 @@ public class DiscoveryAttributes {
@Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException {
CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
try {
Map<String, Set<String>> domainsToCategories = getDomainsWithWebCategories(caseDb);
Map<String, Set<String>> domainsToCategories = getDomainsWithWebCategories(caseDb, context);
for (Result result : results) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Domain Category Attribute was being added.");
}
if (result instanceof ResultDomain) {
ResultDomain domain = (ResultDomain) result;
domain.addWebCategories(domainsToCategories.get(domain.getDomain()));
@ -172,14 +179,29 @@ public class DiscoveryAttributes {
* Loads all TSK_WEB_CATEGORY artifacts and maps the domain attribute to
* the category name attribute. Each ResultDomain is then parsed and
* matched against this map of values.
*
* @param caseDb The case database.
* @param context The SearchContext the search which is applying this
* filter is being performed from.
*
* @return domainToCategory - A map of the domain names to the category
* name attribute they are classified as.
*
* @throws TskCoreException
* @throws InterruptedException
* @throws SearchCancellationException - Thrown when the user has
* cancelled the search.
*/
private Map<String, Set<String>> getDomainsWithWebCategories(SleuthkitCase caseDb) throws TskCoreException, InterruptedException {
private Map<String, Set<String>> getDomainsWithWebCategories(SleuthkitCase caseDb, SearchContext context) throws TskCoreException, InterruptedException, SearchCancellationException {
Map<String, Set<String>> domainToCategory = new HashMap<>();
for (BlackboardArtifact artifact : caseDb.getBlackboardArtifacts(TSK_WEB_CATEGORIZATION)) {
if (Thread.currentThread().isInterrupted()) {
throw new InterruptedException();
}
if (context.searchIsCancelled()) {
throw new SearchCancellationException("Search was cancelled while getting domains for artifact type: " + artifact.getDisplayName());
}
BlackboardAttribute webCategory = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME));
BlackboardAttribute domain = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN));
if (webCategory != null && domain != null) {
@ -206,14 +228,16 @@ public class DiscoveryAttributes {
@Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException {
CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Get pairs of (object ID, keyword list name) for all files in the list of files that have
// keyword list hits.
String selectQuery = createSetNameClause(results, BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID(),
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID());
SetKeywordListNamesCallback callback = new SetKeywordListNamesCallback(results);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Keyword List Attribute was being added.");
}
try {
caseDb.getCaseDbAccessManager().select(selectQuery, callback);
} catch (TskCoreException ex) {
@ -278,8 +302,20 @@ public class DiscoveryAttributes {
* Example: query for notable status of google.com. Result: notable With
* this map, all domain instances that represent google.com can be updated
* after one simple lookup.
*
* @param domainsBatch The list of ResultDomains to organize.
* @param attributeType The type of correlation attribute being organized.
* @param context The SearchContext the search which is applying this
* filter is being performed from.
*
* @return resultDomainTable - A map of the normalized domain name to the
* list of ResultDomain objects which are part of that normalized
* domain.
*
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/
private static Map<String, List<ResultDomain>> organizeByValue(List<ResultDomain> domainsBatch, CorrelationAttributeInstance.Type attributeType) {
private static Map<String, List<ResultDomain>> organizeByValue(List<ResultDomain> domainsBatch, CorrelationAttributeInstance.Type attributeType, SearchContext context) throws SearchCancellationException {
final Map<String, List<ResultDomain>> resultDomainTable = new HashMap<>();
for (ResultDomain domainInstance : domainsBatch) {
try {
@ -288,6 +324,9 @@ public class DiscoveryAttributes {
final List<ResultDomain> bucket = resultDomainTable.getOrDefault(normalizedDomain, new ArrayList<>());
bucket.add(domainInstance);
resultDomainTable.put(normalizedDomain, bucket);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("Search was cancelled while organizing domains by their normalized value.");
}
} catch (CorrelationAttributeNormalizationException ex) {
logger.log(Level.INFO, String.format("Domain [%s] failed normalization, skipping...", domainInstance.getDomain()));
}
@ -322,39 +361,73 @@ public class DiscoveryAttributes {
@Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException {
CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
if (centralRepoDb != null) {
processFilesWithCr(results, centralRepoDb);
processFilesWithCr(results, centralRepoDb, context);
}
}
private void processFilesWithCr(List<Result> results, CentralRepository centralRepo) throws DiscoveryException {
/**
* Helper method to batch the domain results and check for notability.
*
* @param results The results which are being checked for previously
* being notable in the CR.
* @param centralRepo The central repository being used to check for
* notability.
* @param context The SearchContext the search which is applying
* this filter is being performed from.
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has
* cancelled the search.
*/
private void processFilesWithCr(List<Result> results, CentralRepository centralRepo, SearchContext context) throws DiscoveryException, SearchCancellationException {
List<ResultDomain> domainsBatch = new ArrayList<>();
for (Result result : results) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Previously Notable attribute was being calculated with the CR.");
}
if (result.getType() == SearchData.Type.DOMAIN) {
domainsBatch.add((ResultDomain) result);
if (domainsBatch.size() == DOMAIN_BATCH_SIZE) {
queryPreviouslyNotable(domainsBatch, centralRepo);
queryPreviouslyNotable(domainsBatch, centralRepo, context);
domainsBatch.clear();
}
}
}
queryPreviouslyNotable(domainsBatch, centralRepo);
queryPreviouslyNotable(domainsBatch, centralRepo, context);
}
private void queryPreviouslyNotable(List<ResultDomain> domainsBatch, CentralRepository centralRepo) throws DiscoveryException {
/**
* Helper method to check a batch of domains for notability.
*
*
* @param domainsBatch The list of ResultDomains to check for
* notability.
* @param centralRepo The central repository being used to check for
* notability.
* @param context The SearchContext the search which is applying
* this filter is being performed from.
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has
* cancelled the search.
*/
private void queryPreviouslyNotable(List<ResultDomain> domainsBatch, CentralRepository centralRepo, SearchContext context) throws DiscoveryException, SearchCancellationException {
if (domainsBatch.isEmpty()) {
return;
}
try {
final CorrelationAttributeInstance.Type attributeType = centralRepo.getCorrelationTypeById(CorrelationAttributeInstance.DOMAIN_TYPE_ID);
final Map<String, List<ResultDomain>> resultDomainTable = organizeByValue(domainsBatch, attributeType);
final Map<String, List<ResultDomain>> resultDomainTable = organizeByValue(domainsBatch, attributeType, context);
final String values = createCSV(resultDomainTable.keySet());
if (context.searchIsCancelled()) {
throw new SearchCancellationException("Search was cancelled while checking for previously notable domains.");
}
final String tableName = CentralRepoDbUtil.correlationTypeToInstanceTableName(attributeType);
final String domainFrequencyQuery = " value AS domain_name "
+ "FROM " + tableName + " "
@ -421,7 +494,7 @@ public class DiscoveryAttributes {
@Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException {
CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
if (centralRepoDb == null) {
for (Result result : results) {
if (result.getFrequency() == SearchData.Frequency.UNKNOWN && result.getKnown() == TskData.FileKnown.KNOWN) {
@ -429,7 +502,7 @@ public class DiscoveryAttributes {
}
}
} else {
processResultFilesForCR(results, centralRepoDb);
processResultFilesForCR(results, centralRepoDb, context);
}
}
@ -437,16 +510,26 @@ public class DiscoveryAttributes {
* Private helper method for adding Frequency attribute when CR is
* enabled.
*
* @param files The list of ResultFiles to caluclate frequency
* for.
* @param centralRepoDb The central repository currently in use.
* @param results The results which are having their frequency
* checked.
* @param centralRepoDb The central repository being used to check
* frequency.
* @param context The SearchContext the search which is applying
* this filter is being performed from.
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has
* cancelled the search.
*/
private void processResultFilesForCR(List<Result> results,
CentralRepository centralRepoDb) throws DiscoveryException {
CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
List<ResultFile> currentFiles = new ArrayList<>();
Set<String> hashesToLookUp = new HashSet<>();
List<ResultDomain> domainsToQuery = new ArrayList<>();
for (Result result : results) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Frequency attribute was being calculated with the CR.");
}
// If frequency was already calculated, skip...
if (result.getFrequency() == SearchData.Frequency.UNKNOWN) {
if (result.getKnown() == TskData.FileKnown.KNOWN) {
@ -462,7 +545,7 @@ public class DiscoveryAttributes {
}
if (hashesToLookUp.size() >= BATCH_SIZE) {
computeFrequency(hashesToLookUp, currentFiles, centralRepoDb);
computeFrequency(hashesToLookUp, currentFiles, centralRepoDb, context);
hashesToLookUp.clear();
currentFiles.clear();
@ -470,16 +553,15 @@ public class DiscoveryAttributes {
} else {
domainsToQuery.add((ResultDomain) result);
if (domainsToQuery.size() == DOMAIN_BATCH_SIZE) {
queryDomainFrequency(domainsToQuery, centralRepoDb);
queryDomainFrequency(domainsToQuery, centralRepoDb, context);
domainsToQuery.clear();
}
}
}
}
queryDomainFrequency(domainsToQuery, centralRepoDb);
computeFrequency(hashesToLookUp, currentFiles, centralRepoDb);
queryDomainFrequency(domainsToQuery, centralRepoDb, context);
computeFrequency(hashesToLookUp, currentFiles, centralRepoDb, context);
}
}
@ -487,17 +569,22 @@ public class DiscoveryAttributes {
* Query to get the frequency of a domain.
*
* @param domainsToQuery List of domains to check the frequency of.
* @param centralRepository The central repository to query.
* @param centralRepository The central repository being used to check
* frequency.
* @param context The SearchContext the search which is applying
* this filter is being performed from.
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/
private static void queryDomainFrequency(List<ResultDomain> domainsToQuery, CentralRepository centralRepository) throws DiscoveryException {
private static void queryDomainFrequency(List<ResultDomain> domainsToQuery, CentralRepository centralRepository, SearchContext context) throws DiscoveryException, SearchCancellationException {
if (domainsToQuery.isEmpty()) {
return;
}
try {
final CorrelationAttributeInstance.Type attributeType = centralRepository.getCorrelationTypeById(CorrelationAttributeInstance.DOMAIN_TYPE_ID);
final Map<String, List<ResultDomain>> resultDomainTable = organizeByValue(domainsToQuery, attributeType);
final Map<String, List<ResultDomain>> resultDomainTable = organizeByValue(domainsToQuery, attributeType, context);
final String values = createCSV(resultDomainTable.keySet());
final String tableName = CentralRepoDbUtil.correlationTypeToInstanceTableName(attributeType);
final String domainFrequencyQuery = " value AS domain_name, COUNT(value) AS frequency FROM"
@ -508,8 +595,11 @@ public class DiscoveryAttributes {
+ ")) AS foo GROUP BY value";
final DomainFrequencyCallback frequencyCallback = new DomainFrequencyCallback(resultDomainTable);
centralRepository.processSelectClause(domainFrequencyQuery, frequencyCallback);
centralRepository.processSelectClause(domainFrequencyQuery, frequencyCallback);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Domain frequency was being queried with the CR.");
}
if (frequencyCallback.getCause() != null) {
throw frequencyCallback.getCause();
}
@ -620,7 +710,7 @@ public class DiscoveryAttributes {
@Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException {
CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Get pairs of (object ID, hash set name) for all files in the list of files that have
// hash set hits.
@ -628,6 +718,9 @@ public class DiscoveryAttributes {
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID());
HashSetNamesCallback callback = new HashSetNamesCallback(results);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Hash Hit attribute was being added.");
}
try {
caseDb.getCaseDbAccessManager().select(selectQuery, callback);
} catch (TskCoreException ex) {
@ -695,7 +788,7 @@ public class DiscoveryAttributes {
@Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException {
CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Get pairs of (object ID, interesting item set name) for all files in the list of files that have
// interesting file set hits.
@ -703,6 +796,9 @@ public class DiscoveryAttributes {
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID());
InterestingFileSetNamesCallback callback = new InterestingFileSetNamesCallback(results);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Interesting Item attribute was being added.");
}
try {
caseDb.getCaseDbAccessManager().select(selectQuery, callback);
} catch (TskCoreException ex) {
@ -808,7 +904,7 @@ public class DiscoveryAttributes {
@Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException {
CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Get pairs of (object ID, object type name) for all files in the list of files that have
// objects detected
@ -816,6 +912,9 @@ public class DiscoveryAttributes {
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION.getTypeID());
ObjectDetectedNamesCallback callback = new ObjectDetectedNamesCallback(results);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Object Detected attribute was being added.");
}
try {
caseDb.getCaseDbAccessManager().select(selectQuery, callback);
} catch (TskCoreException ex) {
@ -884,10 +983,13 @@ public class DiscoveryAttributes {
@Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException {
CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
try {
for (Result result : results) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while File Tag attribute was being added.");
}
if (result.getType() == SearchData.Type.DOMAIN) {
return;
}
@ -995,14 +1097,20 @@ public class DiscoveryAttributes {
}
/**
*
* Computes the CR frequency of all the given hashes and updates the list of
* files.
*
* @param hashesToLookUp Hashes to find the frequency of.
* @param currentFiles List of files to update with frequencies.
* @param centralRepoDb The central repository being used.
* @param context The SearchContext the search which is applying this
* filter is being performed from.
*
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/
private static void computeFrequency(Set<String> hashesToLookUp, List<ResultFile> currentFiles, CentralRepository centralRepoDb) {
private static void computeFrequency(Set<String> hashesToLookUp, List<ResultFile> currentFiles, CentralRepository centralRepoDb, SearchContext context) throws SearchCancellationException {
if (hashesToLookUp.isEmpty()) {
return;
@ -1022,7 +1130,9 @@ public class DiscoveryAttributes {
FrequencyCallback callback = new FrequencyCallback(currentFiles);
centralRepoDb.processSelectClause(selectClause, callback);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Domain frequency was being queried with the CR.");
}
} catch (CentralRepoException ex) {
logger.log(Level.WARNING, "Error getting frequency counts from Central Repository", ex); // NON-NLS
}

View File

@ -59,6 +59,7 @@ public class DiscoveryKeyUtils {
private final List<AbstractFilter> filters;
private final SleuthkitCase sleuthkitCase;
private final CentralRepository centralRepository;
private final SearchContext context;
/**
* Construct a new SearchKey with all information that defines a search.
@ -70,16 +71,20 @@ public class DiscoveryKeyUtils {
* @param sortingMethod The method to sort the results by.
* @param sleuthkitCase The SleuthkitCase being searched.
* @param centralRepository The Central Repository being searched.
* @param context The SearchContext which reflects the search
* being performed to get results for this
* key.
*/
SearchKey(String userName, List<AbstractFilter> filters,
DiscoveryAttributes.AttributeType groupAttributeType,
Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod sortingMethod,
SleuthkitCase sleuthkitCase, CentralRepository centralRepository) {
SleuthkitCase sleuthkitCase, CentralRepository centralRepository, SearchContext context) {
this.groupAttributeType = groupAttributeType;
this.groupSortingType = groupSortingType;
this.sortingMethod = sortingMethod;
this.filters = filters;
this.context = context;
StringBuilder searchStringBuilder = new StringBuilder();
searchStringBuilder.append(userName);
@ -93,8 +98,8 @@ public class DiscoveryKeyUtils {
}
/**
* Construct a SearchKey without a SleuthkitCase or CentralRepositry
* instance.
* Construct a SearchKey without a SearchContext, SleuthkitCase or
* CentralRepository instance.
*
* @param userName The name of the user performing the search.
* @param filters The Filters being used for the search.
@ -107,7 +112,8 @@ public class DiscoveryKeyUtils {
Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod sortingMethod) {
this(userName, filters, groupAttributeType, groupSortingType,
sortingMethod, null, null);
sortingMethod, null, null, null);
//this constructor should only be used when putting things directly into a map or checking if they are present, since the case db, CR, and search context will be null
}
@Override
@ -141,6 +147,23 @@ public class DiscoveryKeyUtils {
return hash;
}
/**
* Get the SearchContext for the search this key is being used in.
*
* @return The SearchContext the search key is being used in.
*
* @throws DiscoveryException Thrown when the key being used has a null
* context indicating it was not created with
* knowledge of the case or central
* repository databases.
*/
SearchContext getContext() throws DiscoveryException {
if (context == null) {
throw new DiscoveryException("The key in use was created without a context and does not support retrieving information from the databases.");
}
return context;
}
/**
* Get the String representation of this key.
*

View File

@ -78,24 +78,31 @@ public class DomainSearch {
* @param caseDb The case database.
* @param centralRepoDb The central repository database. Can be null
* if not needed.
* @param context The SearchContext the search is being performed from.
*
* @return A LinkedHashMap grouped and sorted according to the parameters.
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/
public Map<GroupKey, Integer> getGroupSizes(String userName,
List<AbstractFilter> filters,
DiscoveryAttributes.AttributeType groupAttributeType,
Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod domainSortingMethod,
SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException {
SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
final Map<GroupKey, List<Result>> searchResults = searchCache.get(
userName, filters, groupAttributeType, groupSortingType,
domainSortingMethod, caseDb, centralRepoDb);
domainSortingMethod, caseDb, centralRepoDb, context);
// Transform the cached results into a map of group key to group size.
final LinkedHashMap<GroupKey, Integer> groupSizes = new LinkedHashMap<>();
for (GroupKey groupKey : searchResults.keySet()) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled before group sizes were finished being calculated");
}
groupSizes.put(groupKey, searchResults.get(groupKey).size());
}
@ -130,11 +137,11 @@ public class DomainSearch {
Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod domainSortingMethod,
GroupKey groupKey, int startingEntry, int numberOfEntries,
SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException {
SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
final Map<GroupKey, List<Result>> searchResults = searchCache.get(
userName, filters, groupAttributeType, groupSortingType,
domainSortingMethod, caseDb, centralRepoDb);
domainSortingMethod, caseDb, centralRepoDb, context);
final List<Result> domainsInGroup = searchResults.get(groupKey);
final List<Result> page = new ArrayList<>();
for (int i = startingEntry; (i < startingEntry + numberOfEntries)

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2020 Basis Technology Corp.
* Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -55,20 +55,24 @@ class DomainSearchCache {
* @param caseDb The case database.
* @param centralRepoDb The central repository database. Can be null if
* not needed.
* @param context The SearchContext the search is being performed
* from.
*
* @return Domain search results matching the given parameters.
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/
Map<GroupKey, List<Result>> get(String userName,
List<AbstractFilter> filters,
DiscoveryAttributes.AttributeType groupAttributeType,
Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod domainSortingMethod,
SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException {
SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
try {
final SearchKey searchKey = new SearchKey(userName, filters, groupAttributeType,
groupSortingType, domainSortingMethod, caseDb, centralRepoDb);
groupSortingType, domainSortingMethod, caseDb, centralRepoDb, context);
return cache.get(searchKey);
} catch (ExecutionException ex) {
throw new DiscoveryException("Error fetching results from cache", ex.getCause());

View File

@ -73,7 +73,7 @@ class DomainSearchCacheLoader extends CacheLoader<SearchKey, Map<GroupKey, List<
throw new InterruptedException();
}
attr.addAttributeToResults(domainResults,
key.getSleuthkitCase(), key.getCentralRepository());
key.getSleuthkitCase(), key.getCentralRepository(), key.getContext());
}
// Apply secondary in memory filters
for (AbstractFilter filter : key.getFilters()) {
@ -81,7 +81,7 @@ class DomainSearchCacheLoader extends CacheLoader<SearchKey, Map<GroupKey, List<
throw new InterruptedException();
}
if (filter.useAlternateFilter()) {
domainResults = filter.applyAlternateFilter(domainResults, key.getSleuthkitCase(), key.getCentralRepository());
domainResults = filter.applyAlternateFilter(domainResults, key.getSleuthkitCase(), key.getCentralRepository(), key.getContext());
}
}
// Sort the ResultDomains by the requested criteria.

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2019-2020 Basis Technology Corp.
* Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -62,17 +62,21 @@ public class FileSearch {
* @param caseDb The case database
* @param centralRepoDb The central repository database. Can be null if
* not needed.
* @param context The SearchContext the search is being performed
* from.
*
* @return The raw search results
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/
static SearchResults runFileSearchDebug(String userName,
List<AbstractFilter> filters,
AttributeType groupAttributeType,
Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod fileSortingMethod,
SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException {
SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Make a list of attributes that we want to add values for. This ensures the
// ResultFile objects will have all needed fields set when it's time to group
// and sort them. For example, if we're grouping by central repo frequency, we need
@ -82,10 +86,10 @@ public class FileSearch {
attributesNeededForGroupingOrSorting.addAll(fileSortingMethod.getRequiredAttributes());
// Run the queries for each filter
List<Result> results = SearchFiltering.runQueries(filters, caseDb, centralRepoDb);
List<Result> results = SearchFiltering.runQueries(filters, caseDb, centralRepoDb, context);
// Add the data to resultFiles for any attributes needed for sorting and grouping
addAttributes(attributesNeededForGroupingOrSorting, results, caseDb, centralRepoDb);
addAttributes(attributesNeededForGroupingOrSorting, results, caseDb, centralRepoDb, context);
// Collect everything in the search results
SearchResults searchResults = new SearchResults(groupSortingType, groupAttributeType, fileSortingMethod);
@ -114,21 +118,28 @@ public class FileSearch {
* @param caseDb The case database
* @param centralRepoDb The central repository database. Can be null if
* not needed.
* @param context The SearchContext the search is being performed
* from.
*
* @return A LinkedHashMap grouped and sorted according to the parameters
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/
public static Map<GroupKey, Integer> getGroupSizes(String userName,
List<AbstractFilter> filters,
AttributeType groupAttributeType,
Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod fileSortingMethod,
SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException {
SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
Map<GroupKey, List<Result>> searchResults = runFileSearch(userName, filters,
groupAttributeType, groupSortingType, fileSortingMethod, caseDb, centralRepoDb);
groupAttributeType, groupSortingType, fileSortingMethod, caseDb, centralRepoDb, context);
LinkedHashMap<GroupKey, Integer> groupSizes = new LinkedHashMap<>();
for (GroupKey groupKey : searchResults.keySet()) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled before group sizes were finished being calculated");
}
groupSizes.put(groupKey, searchResults.get(groupKey).size());
}
return groupSizes;
@ -151,10 +162,14 @@ public class FileSearch {
* @param caseDb The case database
* @param centralRepoDb The central repository database. Can be null if
* not needed.
* @param context The SearchContext the search is being performed
* from.
*
* @return A LinkedHashMap grouped and sorted according to the parameters
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/
public static List<Result> getFilesInGroup(String userName,
List<AbstractFilter> filters,
@ -164,7 +179,7 @@ public class FileSearch {
GroupKey groupKey,
int startingEntry,
int numberOfEntries,
SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException {
SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
//the group should be in the cache at this point
List<Result> filesInGroup = null;
SearchKey searchKey = new SearchKey(userName, filters, groupAttributeType, groupSortingType, fileSortingMethod);
@ -178,7 +193,7 @@ public class FileSearch {
List<Result> page = new ArrayList<>();
if (filesInGroup == null) {
logger.log(Level.INFO, "Group {0} was not cached, performing search to cache all groups again", groupKey);
runFileSearch(userName, filters, groupAttributeType, groupSortingType, fileSortingMethod, caseDb, centralRepoDb);
runFileSearch(userName, filters, groupAttributeType, groupSortingType, fileSortingMethod, caseDb, centralRepoDb, context);
synchronized (searchCache) {
resultsMap = searchCache.getIfPresent(searchKey.getKeyString());
}
@ -218,7 +233,6 @@ public class FileSearch {
TextSummarizer localSummarizer;
synchronized (searchCache) {
localSummarizer = SummaryHelpers.getLocalSummarizer();
}
if (localSummarizer != null) {
try {
@ -247,17 +261,21 @@ public class FileSearch {
* @param caseDb The case database
* @param centralRepoDb The central repository database. Can be null if
* not needed.
* @param context The SearchContext the search is being performed
* from.
*
* @return A LinkedHashMap grouped and sorted according to the parameters
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/
private static Map<GroupKey, List<Result>> runFileSearch(String userName,
public static Map<GroupKey, List<Result>> runFileSearch(String userName,
List<AbstractFilter> filters,
AttributeType groupAttributeType,
Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod fileSortingMethod,
SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException {
SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Make a list of attributes that we want to add values for. This ensures the
// ResultFile objects will have all needed fields set when it's time to group
@ -268,10 +286,10 @@ public class FileSearch {
attributesNeededForGroupingOrSorting.addAll(fileSortingMethod.getRequiredAttributes());
// Run the queries for each filter
List<Result> results = SearchFiltering.runQueries(filters, caseDb, centralRepoDb);
List<Result> results = SearchFiltering.runQueries(filters, caseDb, centralRepoDb, context);
// Add the data to resultFiles for any attributes needed for sorting and grouping
addAttributes(attributesNeededForGroupingOrSorting, results, caseDb, centralRepoDb);
addAttributes(attributesNeededForGroupingOrSorting, results, caseDb, centralRepoDb, context);
// Collect everything in the search results
SearchResults searchResults = new SearchResults(groupSortingType, groupAttributeType, fileSortingMethod);
@ -295,13 +313,17 @@ public class FileSearch {
* @param caseDb The case database
* @param centralRepoDb The central repository database. Can be null if not
* needed.
* @param context The SearchContext the search is being performed
* from.
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/
private static void addAttributes(List<AttributeType> attrs, List<Result> results, SleuthkitCase caseDb, CentralRepository centralRepoDb)
throws DiscoveryException {
private static void addAttributes(List<AttributeType> attrs, List<Result> results, SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context)
throws DiscoveryException, SearchCancellationException {
for (AttributeType attr : attrs) {
attr.addAttributeToResults(results, caseDb, centralRepoDb);
attr.addAttributeToResults(results, caseDb, centralRepoDb, context);
}
}

View File

@ -0,0 +1,40 @@
/*
* Autopsy
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.discovery.search;
import java.util.concurrent.CancellationException;
/**
 * Exception thrown when a discovery search has been deliberately cancelled by
 * the user, carrying a message that records what the search was doing at the
 * moment of cancellation.
 */
public class SearchCancellationException extends CancellationException {

    private static final long serialVersionUID = 1L;

    /**
     * Create a SearchCancellationException whose message records where in the
     * search the cancellation was detected.
     *
     * @param message Text describing what was in progress when the search was
     *                cancelled.
     */
    SearchCancellationException(String message) {
        super(message);
    }
}

View File

@ -0,0 +1,33 @@
/*
* Autopsy
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.discovery.search;
/**
 * Interface for checking whether a search has been cancelled, so that
 * long-running search code can stop early when the user cancels.
 */
public interface SearchContext {
    /**
     * Indicates whether the search associated with this context has been
     * cancelled.
     *
     * @return True if the search has been cancelled, false otherwise.
     */
    boolean searchIsCancelled();
}

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2019-2020 Basis Technology Corp.
* Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -60,10 +60,16 @@ public class SearchFiltering {
* @param caseDb The case database.
* @param centralRepoDb The central repo. Can be null as long as no filters
* need it.
* @param context The SearchContext the search is being performed
* from.
*
* @return List of Results from the search performed.
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/
static List<Result> runQueries(List<AbstractFilter> filters, SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException {
static List<Result> runQueries(List<AbstractFilter> filters, SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
if (caseDb == null) {
throw new DiscoveryException("Case DB parameter is null"); // NON-NLS
}
@ -82,8 +88,11 @@ public class SearchFiltering {
// The file search filter is required, so this should never be empty.
throw new DiscoveryException("Selected filters do not include a case database query");
}
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled before result list could be retrieved.");
}
try {
return getResultList(filters, combinedQuery, caseDb, centralRepoDb);
return getResultList(filters, combinedQuery, caseDb, centralRepoDb, context);
} catch (TskCoreException ex) {
throw new DiscoveryException("Error querying case database", ex); // NON-NLS
}
@ -97,17 +106,23 @@ public class SearchFiltering {
* @param caseDb The case database.
* @param centralRepoDb The central repo. Can be null as long as no filters
* need it.
* @param context The SearchContext the search is being performed
* from.
*
* @return An ArrayList of Results returned by the query.
*
* @throws TskCoreException
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/
private static List<Result> getResultList(List<AbstractFilter> filters, String combinedQuery, SleuthkitCase caseDb, CentralRepository centralRepoDb) throws TskCoreException, DiscoveryException {
private static List<Result> getResultList(List<AbstractFilter> filters, String combinedQuery, SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws TskCoreException, DiscoveryException, SearchCancellationException {
// Get all matching abstract files
List<Result> resultList = new ArrayList<>();
List<AbstractFile> sqlResults = caseDb.findAllFilesWhere(combinedQuery);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while the case database query was being performed.");
}
// If there are no results, return now
if (sqlResults.isEmpty()) {
return resultList;
@ -120,8 +135,11 @@ public class SearchFiltering {
// Now run any non-SQL filters.
for (AbstractFilter filter : filters) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while alternate filters were being applied.");
}
if (filter.useAlternateFilter()) {
resultList = filter.applyAlternateFilter(resultList, caseDb, centralRepoDb);
resultList = filter.applyAlternateFilter(resultList, caseDb, centralRepoDb, context);
}
// There are no matches for the filters run so far, so return
if (resultList.isEmpty()) {
@ -243,7 +261,8 @@ public class SearchFiltering {
}
/**
* Used by backend domain search code to query for additional artifact types.
* Used by backend domain search code to query for additional artifact
* types.
*/
String getWhereClause(List<ARTIFACT_TYPE> nonVisibleArtifactTypesToInclude) {
StringJoiner joiner = joinStandardArtifactTypes();
@ -674,14 +693,17 @@ public class SearchFiltering {
@Override
public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException {
CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Set the frequency for each file
DiscoveryAttributes.FrequencyAttribute freqAttr = new DiscoveryAttributes.FrequencyAttribute();
freqAttr.addAttributeToResults(currentResults, caseDb, centralRepoDb);
freqAttr.addAttributeToResults(currentResults, caseDb, centralRepoDb, context);
// If the frequency matches the filter, add the file to the results
List<Result> frequencyResults = new ArrayList<>();
for (Result file : currentResults) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Frequency alternate filter was being applied.");
}
if (frequencies.contains(file.getFrequency())) {
frequencyResults.add(file);
}
@ -723,9 +745,12 @@ public class SearchFiltering {
@Override
public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException {
CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
List<Result> filteredResults = new ArrayList<>();
for (Result result : currentResults) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Known Account Type alternate filter was being applied.");
}
if (result instanceof ResultDomain) {
ResultDomain domain = (ResultDomain) result;
if (domain.hasKnownAccountType()) {
@ -765,11 +790,14 @@ public class SearchFiltering {
@Override
public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException {
CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
DiscoveryAttributes.PreviouslyNotableAttribute previouslyNotableAttr = new DiscoveryAttributes.PreviouslyNotableAttribute();
previouslyNotableAttr.addAttributeToResults(currentResults, caseDb, centralRepoDb);
previouslyNotableAttr.addAttributeToResults(currentResults, caseDb, centralRepoDb, context);
List<Result> filteredResults = new ArrayList<>();
for (Result file : currentResults) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Previously Notable alternate filter was being applied.");
}
if (file.getPreviouslyNotableInCR() == SearchData.PreviouslyNotable.PREVIOUSLY_NOTABLE) {
filteredResults.add(file);
}
@ -1068,7 +1096,7 @@ public class SearchFiltering {
@Override
public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException {
CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
if (centralRepoDb == null) {
throw new DiscoveryException("Can not run Previously Notable filter with null Central Repository DB"); // NON-NLS
@ -1087,6 +1115,9 @@ public class SearchFiltering {
CorrelationAttributeInstance.Type type = CorrelationAttributeInstance.getDefaultCorrelationTypes().get(CorrelationAttributeInstance.FILES_TYPE_ID);
for (Result result : currentResults) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Notable alternate filter was being applied.");
}
ResultFile file = (ResultFile) result;
if (result.getType() == SearchData.Type.DOMAIN) {
break;

View File

@ -1,7 +1,7 @@
/*
* Autopsy
*
* Copyright 2020 Basis Technology Corp.
* Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -31,7 +31,6 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import javax.swing.SwingUtilities;
import org.apache.commons.lang.StringUtils;
import org.openide.util.NbBundle.Messages;
import org.openide.windows.WindowManager;
@ -574,7 +573,7 @@ final class DiscoveryDialog extends javax.swing.JDialog {
}
private void searchButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_searchButtonActionPerformed
// Get the selected filters
setVisible(false); //setVisible is used here instead of dispose in case the dispose code changes
final DiscoveryTopComponent tc = DiscoveryTopComponent.getTopComponent();
if (tc == null) {
setValid("No Top Component Found");
@ -584,6 +583,7 @@ final class DiscoveryDialog extends javax.swing.JDialog {
tc.open();
}
tc.resetTopComponent();
// Get the selected filters
List<AbstractFilter> filters;
if (videosButton.isSelected()) {
filters = videoFilterPanel.getFilters();
@ -617,7 +617,6 @@ final class DiscoveryDialog extends javax.swing.JDialog {
}
searchWorker = new SearchWorker(centralRepoDb, type, filters, groupingAttr, groupSortAlgorithm, fileSort);
searchWorker.execute();
dispose();
tc.toFront();
tc.requestActive();
}//GEN-LAST:event_searchButtonActionPerformed
@ -651,6 +650,7 @@ final class DiscoveryDialog extends javax.swing.JDialog {
void cancelSearch() {
if (searchWorker != null) {
searchWorker.cancel(true);
searchWorker = null;
}
}
@ -750,7 +750,6 @@ final class DiscoveryDialog extends javax.swing.JDialog {
|| eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT.getTypeID()) {
shouldUpdate = shouldUpdateFilters(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(), eventData, interestingItems);
}
}
} catch (NoCurrentCaseException notUsed) {
// Case is closed, do nothing.

View File

@ -1,7 +1,7 @@
/*
* Autopsy
*
* Copyright 2019-2020 Basis Technology Corp.
* Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -260,7 +260,6 @@ public final class DiscoveryTopComponent extends TopComponent {
private void newSearchButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_newSearchButtonActionPerformed
close();
final DiscoveryDialog discDialog = DiscoveryDialog.getDiscoveryDialogInstance();
discDialog.cancelSearch();
discDialog.setVisible(true);
discDialog.validateDialog();
}//GEN-LAST:event_newSearchButtonActionPerformed

View File

@ -1,7 +1,7 @@
/*
* Autopsy
*
* Copyright 2019 Basis Technology Corp.
* Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -36,6 +36,8 @@ import org.sleuthkit.autopsy.discovery.search.DiscoveryException;
import org.sleuthkit.autopsy.discovery.search.DomainSearch;
import org.sleuthkit.autopsy.discovery.search.ResultsSorter;
import org.sleuthkit.autopsy.discovery.search.Result;
import org.sleuthkit.autopsy.discovery.search.SearchCancellationException;
import org.sleuthkit.autopsy.discovery.search.SearchContext;
/**
* SwingWorker to retrieve the contents of a page.
@ -87,7 +89,7 @@ final class PageWorker extends SwingWorker<Void, Void> {
@Override
protected Void doInBackground() throws Exception {
SearchContext context = new SwingWorkerSearchContext(this);
try {
// Run the search
if (resultType == SearchData.Type.DOMAIN) {
@ -96,17 +98,22 @@ final class PageWorker extends SwingWorker<Void, Void> {
groupingAttribute,
groupSort,
fileSortMethod, groupKey, startingEntry, pageSize,
Case.getCurrentCase().getSleuthkitCase(), centralRepo));
Case.getCurrentCase().getSleuthkitCase(), centralRepo, context));
} else {
results.addAll(FileSearch.getFilesInGroup(System.getProperty(USER_NAME_PROPERTY), searchfilters,
groupingAttribute,
groupSort,
fileSortMethod, groupKey, startingEntry, pageSize,
Case.getCurrentCase().getSleuthkitCase(), centralRepo));
Case.getCurrentCase().getSleuthkitCase(), centralRepo, context));
}
} catch (DiscoveryException ex) {
logger.log(Level.SEVERE, "Error running file search test", ex);
cancel(true);
} catch (SearchCancellationException ex) {
//The user does not explicitly have a way to cancel the loading of a page
//but they could have cancelled the search during the loading of the first page
//So this may or may not be an issue depending on when this occurred.
logger.log(Level.WARNING, "Search was cancelled while retrieving data for results page with starting entry: " + startingEntry, ex);
}
return null;
}

View File

@ -1,7 +1,7 @@
/*
* Autopsy
*
* Copyright 2019 Basis Technology Corp.
* Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -35,6 +35,8 @@ import org.sleuthkit.autopsy.discovery.search.FileSearch;
import org.sleuthkit.autopsy.discovery.search.DiscoveryException;
import org.sleuthkit.autopsy.discovery.search.DomainSearch;
import org.sleuthkit.autopsy.discovery.search.ResultsSorter;
import org.sleuthkit.autopsy.discovery.search.SearchCancellationException;
import org.sleuthkit.autopsy.discovery.search.SearchContext;
import org.sleuthkit.autopsy.discovery.search.SearchData;
/**
@ -75,23 +77,28 @@ final class SearchWorker extends SwingWorker<Void, Void> {
protected Void doInBackground() throws Exception {
try {
// Run the search
SearchContext context = new SwingWorkerSearchContext(this);
if (searchType == SearchData.Type.DOMAIN) {
DomainSearch domainSearch = new DomainSearch();
results.putAll(domainSearch.getGroupSizes(System.getProperty(USER_NAME_PROPERTY), filters,
groupingAttr,
groupSortAlgorithm,
fileSort,
Case.getCurrentCase().getSleuthkitCase(), centralRepoDb));
Case.getCurrentCase().getSleuthkitCase(), centralRepoDb, context));
} else {
results.putAll(FileSearch.getGroupSizes(System.getProperty(USER_NAME_PROPERTY), filters,
groupingAttr,
groupSortAlgorithm,
fileSort,
Case.getCurrentCase().getSleuthkitCase(), centralRepoDb));
Case.getCurrentCase().getSleuthkitCase(), centralRepoDb, context));
}
} catch (DiscoveryException ex) {
logger.log(Level.SEVERE, "Error running file search test", ex);
logger.log(Level.SEVERE, "Error running file search test.", ex);
cancel(true);
} catch (SearchCancellationException ex) {
//A search cancellation exception should indicate that the user chose to cancel this search,
//so it is not a problem, but it may be useful to know what was being done when it was cancelled.
logger.log(Level.INFO, "Discovery search was cancelled.", ex);
}
return null;
}

View File

@ -0,0 +1,45 @@
/*
* Autopsy
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.discovery.ui;
import javax.swing.SwingWorker;
import org.sleuthkit.autopsy.discovery.search.SearchContext;
/**
 * SearchContext implementation backed by a SwingWorker; a search running on
 * the worker's background thread is considered cancelled exactly when the
 * worker itself has been cancelled.
 */
class SwingWorkerSearchContext implements SearchContext {

    private final SwingWorker<Void, Void> worker;

    /**
     * Create a context tied to the given SwingWorker.
     *
     * @param worker The SwingWorker whose background thread is performing the
     *               search.
     */
    SwingWorkerSearchContext(SwingWorker<Void, Void> worker) {
        this.worker = worker;
    }

    @Override
    public boolean searchIsCancelled() {
        return worker.isCancelled();
    }
}

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2020 Basis Technology Corp.
* Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -52,7 +52,7 @@ public class DomainSearchCacheLoaderTest {
new DiscoveryAttributes.DataSourceAttribute(),
Group.GroupSortingAlgorithm.BY_GROUP_NAME,
ResultsSorter.SortingMethod.BY_DOMAIN_NAME,
caseDb, null);
caseDb, null, new TestSearchContextImpl(false));
DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class);
when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains);
@ -88,7 +88,7 @@ public class DomainSearchCacheLoaderTest {
new DiscoveryAttributes.NoGroupingAttribute(),
Group.GroupSortingAlgorithm.BY_GROUP_NAME,
ResultsSorter.SortingMethod.BY_DOMAIN_NAME,
caseDb, null);
caseDb, null, new TestSearchContextImpl(false));
DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class);
when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains);
@ -121,7 +121,7 @@ public class DomainSearchCacheLoaderTest {
new DiscoveryAttributes.NoGroupingAttribute(),
Group.GroupSortingAlgorithm.BY_GROUP_NAME,
ResultsSorter.SortingMethod.BY_DATA_SOURCE,
caseDb, null);
caseDb, null, new TestSearchContextImpl(false));
DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class);
when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains);
@ -155,7 +155,7 @@ public class DomainSearchCacheLoaderTest {
new DiscoveryAttributes.DataSourceAttribute(),
Group.GroupSortingAlgorithm.BY_GROUP_SIZE,
ResultsSorter.SortingMethod.BY_DOMAIN_NAME,
caseDb, null);
caseDb, null, new TestSearchContextImpl(false));
DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class);
when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains);
@ -173,4 +173,5 @@ public class DomainSearchCacheLoaderTest {
}
}
}
}

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2020 Basis Technology Corp.
* Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -24,7 +24,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import static org.mockito.Mockito.*;
import static org.junit.Assert.*;
import org.sleuthkit.autopsy.discovery.search.DiscoveryKeyUtils.GroupKey;
@ -46,11 +45,11 @@ public class DomainSearchTest {
);
}
};
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
Map<GroupKey, Integer> sizes = domainSearch.getGroupSizes(null,
new ArrayList<>(), null, null, null, null, null);
new ArrayList<>(), null, null, null, null, null, new TestSearchContextImpl(false));
assertEquals(4, sizes.get(groupOne).longValue());
}
@ -81,11 +80,11 @@ public class DomainSearchTest {
}
};
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
Map<GroupKey, Integer> sizes = domainSearch.getGroupSizes(null,
new ArrayList<>(), null, null, null, null, null);
new ArrayList<>(), null, null, null, null, null, new TestSearchContextImpl(false));
assertEquals(4, sizes.get(groupOne).longValue());
assertEquals(3, sizes.get(groupTwo).longValue());
assertEquals(1, sizes.get(groupThree).longValue());
@ -95,11 +94,11 @@ public class DomainSearchTest {
public void groupSizes_EmptyGroup_ShouldBeSizeZero() throws DiscoveryException {
DomainSearchCache cache = mock(DomainSearchCache.class);
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(new HashMap<>());
when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(new HashMap<>());
DomainSearch domainSearch = new DomainSearch(cache, null, null);
Map<GroupKey, Integer> sizes = domainSearch.getGroupSizes(null,
new ArrayList<>(), null, null, null, null, null);
new ArrayList<>(), null, null, null, null, null, new TestSearchContextImpl(false));
assertEquals(0, sizes.size());
}
@ -120,11 +119,11 @@ public class DomainSearchTest {
}
};
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 3, null, null);
new ArrayList<>(), null, null, null, groupOne, 0, 3, null, null, new TestSearchContextImpl(false));
assertEquals(3, firstPage.size());
for (int i = 0; i < firstPage.size(); i++) {
assertEquals(domains.get(i), firstPage.get(i));
@ -148,11 +147,11 @@ public class DomainSearchTest {
}
};
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 100, null, null);
new ArrayList<>(), null, null, null, groupOne, 0, 100, null, null, new TestSearchContextImpl(false));
assertEquals(4, firstPage.size());
for (int i = 0; i < firstPage.size(); i++) {
assertEquals(domains.get(i), firstPage.get(i));
@ -176,18 +175,18 @@ public class DomainSearchTest {
}
};
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 2, null, null);
new ArrayList<>(), null, null, null, groupOne, 0, 2, null, null, new TestSearchContextImpl(false));
assertEquals(2, firstPage.size());
for (int i = 0; i < firstPage.size(); i++) {
assertEquals(domains.get(i), firstPage.get(i));
}
}
@Test
@Test
public void getDomains_SingleGroupLastPageLastDomain_ShouldContainLastDomain() throws DiscoveryException {
DomainSearchCache cache = mock(DomainSearchCache.class);
@ -204,11 +203,11 @@ public class DomainSearchTest {
}
};
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 3, 1, null, null);
new ArrayList<>(), null, null, null, groupOne, 3, 1, null, null, new TestSearchContextImpl(false));
assertEquals(1, firstPage.size());
assertEquals(domains.get(domains.size() - 1), firstPage.get(0));
}
@ -230,11 +229,11 @@ public class DomainSearchTest {
}
};
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 20, 5, null, null);
new ArrayList<>(), null, null, null, groupOne, 20, 5, null, null, new TestSearchContextImpl(false));
assertEquals(0, firstPage.size());
}
@ -255,11 +254,11 @@ public class DomainSearchTest {
}
};
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 0, null, null);
new ArrayList<>(), null, null, null, groupOne, 0, 0, null, null, new TestSearchContextImpl(false));
assertEquals(0, firstPage.size());
}
@ -290,11 +289,11 @@ public class DomainSearchTest {
}
};
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 3, null, null);
new ArrayList<>(), null, null, null, groupOne, 0, 3, null, null, new TestSearchContextImpl(false));
assertEquals(3, firstPage.size());
}
@ -325,11 +324,11 @@ public class DomainSearchTest {
}
};
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupTwo, 1, 2, null, null);
new ArrayList<>(), null, null, null, groupTwo, 1, 2, null, null, new TestSearchContextImpl(false));
assertEquals(2, firstPage.size());
for (int i = 0; i < firstPage.size(); i++) {
assertEquals(dummyData.get(groupTwo).get(i + 1), firstPage.get(i));
@ -357,7 +356,7 @@ public class DomainSearchTest {
}
};
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
@ -365,9 +364,9 @@ public class DomainSearchTest {
int size = 2;
while (start + size <= domains.size()) {
List<Result> page = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, start, size, null, null);
new ArrayList<>(), null, null, null, groupOne, start, size, null, null, new TestSearchContextImpl(false));
assertEquals(2, page.size());
for(int i = 0; i < page.size(); i++) {
for (int i = 0; i < page.size(); i++) {
assertEquals(domains.get(start + i), page.get(i));
}
@ -379,7 +378,7 @@ public class DomainSearchTest {
private final String name;
public DummyKey(String name) {
DummyKey(String name) {
this.name = name;
}

View File

@ -0,0 +1,37 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.discovery.search;
/**
 * SearchContext implementation for unit tests. Ensures code under test does
 * not hit NPEs from a missing context and reports a fixed, caller-chosen
 * cancellation status.
 */
public class TestSearchContextImpl implements SearchContext {

    // Fixed cancellation status this context reports for its whole lifetime.
    private final boolean cancelled;

    /**
     * Construct a test context with a constant cancellation status.
     *
     * @param hasBeenCancelled True if the simulated search should report
     *                         itself as cancelled, false otherwise.
     */
    public TestSearchContextImpl(boolean hasBeenCancelled) {
        this.cancelled = hasBeenCancelled;
    }

    @Override
    public boolean searchIsCancelled() {
        return cancelled;
    }
}