Merge pull request #7205 from wschaeferB/7877-ImprovedDiscoverySearchCancellation

7877 improved discovery search cancellation
This commit is contained in:
Richard Cordovano 2021-08-26 16:45:35 -04:00 committed by GitHub
commit daf22f6d5c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
18 changed files with 530 additions and 162 deletions

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy * Autopsy
* *
* Copyright 2020 Basis Technology Corp. * Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -54,14 +54,18 @@ public abstract class AbstractFilter {
* @param caseDb The case database * @param caseDb The case database
* @param centralRepoDb The central repo database. Can be null if the * @param centralRepoDb The central repo database. Can be null if the
* filter does not require it. * filter does not require it.
* @param context The SearchContext the search which is applying this
* filter is being performed from.
* *
* @return The list of results that match this filter (and any that came * @return The list of results that match this filter (and any that came
* before it) * before it)
* *
* @throws DiscoveryException * @throws DiscoveryException
* @throws SearchCancellationException Thrown when the user has cancelled
* the search.
*/ */
public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb, public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException { CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
return new ArrayList<>(); return new ArrayList<>();
} }

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy * Autopsy
* *
* Copyright 2020 Basis Technology Corp. * Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -78,10 +78,14 @@ public class DiscoveryAttributes {
* @param caseDb The case database. * @param caseDb The case database.
* @param centralRepoDb The central repository database. Can be null if * @param centralRepoDb The central repository database. Can be null if
* not needed. * not needed.
* @param context The SearchContext the search which is applying
* this filter is being performed from.
* *
* @throws DiscoveryException * @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has
* cancelled the search.
*/ */
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Default is to do nothing // Default is to do nothing
} }
} }
@ -154,10 +158,13 @@ public class DiscoveryAttributes {
@Override @Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb, public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException { CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
try { try {
Map<String, Set<String>> domainsToCategories = getDomainsWithWebCategories(caseDb); Map<String, Set<String>> domainsToCategories = getDomainsWithWebCategories(caseDb, context);
for (Result result : results) { for (Result result : results) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Domain Category Attribute was being added.");
}
if (result instanceof ResultDomain) { if (result instanceof ResultDomain) {
ResultDomain domain = (ResultDomain) result; ResultDomain domain = (ResultDomain) result;
domain.addWebCategories(domainsToCategories.get(domain.getDomain())); domain.addWebCategories(domainsToCategories.get(domain.getDomain()));
@ -172,14 +179,29 @@ public class DiscoveryAttributes {
* Loads all TSK_WEB_CATEGORY artifacts and maps the domain attribute to * Loads all TSK_WEB_CATEGORY artifacts and maps the domain attribute to
* the category name attribute. Each ResultDomain is then parsed and * the category name attribute. Each ResultDomain is then parsed and
* matched against this map of values. * matched against this map of values.
*
* @param caseDb The case database.
* @param context The SearchContext the search which is applying this
* filter is being performed from.
*
* @return domainToCategory - A map of the domain names to the category
* name attribute they are classified as.
*
* @throws TskCoreException
* @throws InterruptedException
* @throws SearchCancellationException - Thrown when the user has
* cancelled the search.
*/ */
private Map<String, Set<String>> getDomainsWithWebCategories(SleuthkitCase caseDb) throws TskCoreException, InterruptedException { private Map<String, Set<String>> getDomainsWithWebCategories(SleuthkitCase caseDb, SearchContext context) throws TskCoreException, InterruptedException, SearchCancellationException {
Map<String, Set<String>> domainToCategory = new HashMap<>(); Map<String, Set<String>> domainToCategory = new HashMap<>();
for (BlackboardArtifact artifact : caseDb.getBlackboardArtifacts(TSK_WEB_CATEGORIZATION)) { for (BlackboardArtifact artifact : caseDb.getBlackboardArtifacts(TSK_WEB_CATEGORIZATION)) {
if (Thread.currentThread().isInterrupted()) { if (Thread.currentThread().isInterrupted()) {
throw new InterruptedException(); throw new InterruptedException();
} }
if (context.searchIsCancelled()) {
throw new SearchCancellationException("Search was cancelled while getting domains for artifact type: " + artifact.getDisplayName());
}
BlackboardAttribute webCategory = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME)); BlackboardAttribute webCategory = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME));
BlackboardAttribute domain = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN)); BlackboardAttribute domain = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN));
if (webCategory != null && domain != null) { if (webCategory != null && domain != null) {
@ -206,14 +228,16 @@ public class DiscoveryAttributes {
@Override @Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb, public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException { CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Get pairs of (object ID, keyword list name) for all files in the list of files that have // Get pairs of (object ID, keyword list name) for all files in the list of files that have
// keyword list hits. // keyword list hits.
String selectQuery = createSetNameClause(results, BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID(), String selectQuery = createSetNameClause(results, BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID(),
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID()); BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID());
SetKeywordListNamesCallback callback = new SetKeywordListNamesCallback(results); SetKeywordListNamesCallback callback = new SetKeywordListNamesCallback(results);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Keyword List Attribute was being added.");
}
try { try {
caseDb.getCaseDbAccessManager().select(selectQuery, callback); caseDb.getCaseDbAccessManager().select(selectQuery, callback);
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
@ -278,8 +302,20 @@ public class DiscoveryAttributes {
* Example: query for notable status of google.com. Result: notable With * Example: query for notable status of google.com. Result: notable With
* this map, all domain instances that represent google.com can be updated * this map, all domain instances that represent google.com can be updated
* after one simple lookup. * after one simple lookup.
*
* @param domainsBatch The list of ResultDomains to organize.
* @param attributeType The type of correlation attribute being organized.
* @param context The SearchContext the search which is applying this
* filter is being performed from.
*
* @return resultDomainTable - A map of the normalized domain name to the
* list of ResultDomain objects which are part of that normalized
* domain.
*
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/ */
private static Map<String, List<ResultDomain>> organizeByValue(List<ResultDomain> domainsBatch, CorrelationAttributeInstance.Type attributeType) { private static Map<String, List<ResultDomain>> organizeByValue(List<ResultDomain> domainsBatch, CorrelationAttributeInstance.Type attributeType, SearchContext context) throws SearchCancellationException {
final Map<String, List<ResultDomain>> resultDomainTable = new HashMap<>(); final Map<String, List<ResultDomain>> resultDomainTable = new HashMap<>();
for (ResultDomain domainInstance : domainsBatch) { for (ResultDomain domainInstance : domainsBatch) {
try { try {
@ -288,6 +324,9 @@ public class DiscoveryAttributes {
final List<ResultDomain> bucket = resultDomainTable.getOrDefault(normalizedDomain, new ArrayList<>()); final List<ResultDomain> bucket = resultDomainTable.getOrDefault(normalizedDomain, new ArrayList<>());
bucket.add(domainInstance); bucket.add(domainInstance);
resultDomainTable.put(normalizedDomain, bucket); resultDomainTable.put(normalizedDomain, bucket);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("Search was cancelled while orgainizing domains by their normalized value.");
}
} catch (CorrelationAttributeNormalizationException ex) { } catch (CorrelationAttributeNormalizationException ex) {
logger.log(Level.INFO, String.format("Domain [%s] failed normalization, skipping...", domainInstance.getDomain())); logger.log(Level.INFO, String.format("Domain [%s] failed normalization, skipping...", domainInstance.getDomain()));
} }
@ -322,39 +361,73 @@ public class DiscoveryAttributes {
@Override @Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb, public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException { CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
if (centralRepoDb != null) { if (centralRepoDb != null) {
processFilesWithCr(results, centralRepoDb); processFilesWithCr(results, centralRepoDb, context);
} }
} }
private void processFilesWithCr(List<Result> results, CentralRepository centralRepo) throws DiscoveryException { /**
* Helper method to batch the domain results and check for notability.
*
* @param results The results which are being checked for previously
* being notable in the CR.
* @param centralRepo The central repository being used to check for
* notability.
* @param context The SearchContext the search which is applying
* this filter is being performed from.
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has
* cancelled the search.
*/
private void processFilesWithCr(List<Result> results, CentralRepository centralRepo, SearchContext context) throws DiscoveryException, SearchCancellationException {
List<ResultDomain> domainsBatch = new ArrayList<>(); List<ResultDomain> domainsBatch = new ArrayList<>();
for (Result result : results) { for (Result result : results) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Previously Notable attribute was being calculated with the CR.");
}
if (result.getType() == SearchData.Type.DOMAIN) { if (result.getType() == SearchData.Type.DOMAIN) {
domainsBatch.add((ResultDomain) result); domainsBatch.add((ResultDomain) result);
if (domainsBatch.size() == DOMAIN_BATCH_SIZE) { if (domainsBatch.size() == DOMAIN_BATCH_SIZE) {
queryPreviouslyNotable(domainsBatch, centralRepo); queryPreviouslyNotable(domainsBatch, centralRepo, context);
domainsBatch.clear(); domainsBatch.clear();
} }
} }
} }
queryPreviouslyNotable(domainsBatch, centralRepo); queryPreviouslyNotable(domainsBatch, centralRepo, context);
} }
private void queryPreviouslyNotable(List<ResultDomain> domainsBatch, CentralRepository centralRepo) throws DiscoveryException { /**
* Helper method to check a batch of domains for notability.
*
*
* @param domainsBatch The list of ResultDomains to check for
* notability.
* @param centralRepo The central repository being used to check for
* notability.
* @param context The SearchContext the search which is applying
* this filter is being performed from.
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has
* cancelled the search.
*/
private void queryPreviouslyNotable(List<ResultDomain> domainsBatch, CentralRepository centralRepo, SearchContext context) throws DiscoveryException, SearchCancellationException {
if (domainsBatch.isEmpty()) { if (domainsBatch.isEmpty()) {
return; return;
} }
try { try {
final CorrelationAttributeInstance.Type attributeType = centralRepo.getCorrelationTypeById(CorrelationAttributeInstance.DOMAIN_TYPE_ID); final CorrelationAttributeInstance.Type attributeType = centralRepo.getCorrelationTypeById(CorrelationAttributeInstance.DOMAIN_TYPE_ID);
final Map<String, List<ResultDomain>> resultDomainTable = organizeByValue(domainsBatch, attributeType); final Map<String, List<ResultDomain>> resultDomainTable = organizeByValue(domainsBatch, attributeType, context);
final String values = createCSV(resultDomainTable.keySet()); final String values = createCSV(resultDomainTable.keySet());
if (context.searchIsCancelled()) {
throw new SearchCancellationException("Search was cancelled while checking for previously notable domains.");
}
final String tableName = CentralRepoDbUtil.correlationTypeToInstanceTableName(attributeType); final String tableName = CentralRepoDbUtil.correlationTypeToInstanceTableName(attributeType);
final String domainFrequencyQuery = " value AS domain_name " final String domainFrequencyQuery = " value AS domain_name "
+ "FROM " + tableName + " " + "FROM " + tableName + " "
@ -421,7 +494,7 @@ public class DiscoveryAttributes {
@Override @Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb, public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException { CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
if (centralRepoDb == null) { if (centralRepoDb == null) {
for (Result result : results) { for (Result result : results) {
if (result.getFrequency() == SearchData.Frequency.UNKNOWN && result.getKnown() == TskData.FileKnown.KNOWN) { if (result.getFrequency() == SearchData.Frequency.UNKNOWN && result.getKnown() == TskData.FileKnown.KNOWN) {
@ -429,7 +502,7 @@ public class DiscoveryAttributes {
} }
} }
} else { } else {
processResultFilesForCR(results, centralRepoDb); processResultFilesForCR(results, centralRepoDb, context);
} }
} }
@ -437,16 +510,26 @@ public class DiscoveryAttributes {
* Private helper method for adding Frequency attribute when CR is * Private helper method for adding Frequency attribute when CR is
* enabled. * enabled.
* *
* @param files The list of ResultFiles to caluclate frequency * @param results The results which are having their frequency
* for. * checked.
* @param centralRepoDb The central repository currently in use. * @param centralRepoDb The central repository being used to check
* frequency.
* @param context The SearchContext the search which is applying
* this filter is being performed from.
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has
* cancelled the search.
*/ */
private void processResultFilesForCR(List<Result> results, private void processResultFilesForCR(List<Result> results,
CentralRepository centralRepoDb) throws DiscoveryException { CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
List<ResultFile> currentFiles = new ArrayList<>(); List<ResultFile> currentFiles = new ArrayList<>();
Set<String> hashesToLookUp = new HashSet<>(); Set<String> hashesToLookUp = new HashSet<>();
List<ResultDomain> domainsToQuery = new ArrayList<>(); List<ResultDomain> domainsToQuery = new ArrayList<>();
for (Result result : results) { for (Result result : results) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Frequency attribute was being calculated with the CR.");
}
// If frequency was already calculated, skip... // If frequency was already calculated, skip...
if (result.getFrequency() == SearchData.Frequency.UNKNOWN) { if (result.getFrequency() == SearchData.Frequency.UNKNOWN) {
if (result.getKnown() == TskData.FileKnown.KNOWN) { if (result.getKnown() == TskData.FileKnown.KNOWN) {
@ -462,7 +545,7 @@ public class DiscoveryAttributes {
} }
if (hashesToLookUp.size() >= BATCH_SIZE) { if (hashesToLookUp.size() >= BATCH_SIZE) {
computeFrequency(hashesToLookUp, currentFiles, centralRepoDb); computeFrequency(hashesToLookUp, currentFiles, centralRepoDb, context);
hashesToLookUp.clear(); hashesToLookUp.clear();
currentFiles.clear(); currentFiles.clear();
@ -470,16 +553,15 @@ public class DiscoveryAttributes {
} else { } else {
domainsToQuery.add((ResultDomain) result); domainsToQuery.add((ResultDomain) result);
if (domainsToQuery.size() == DOMAIN_BATCH_SIZE) { if (domainsToQuery.size() == DOMAIN_BATCH_SIZE) {
queryDomainFrequency(domainsToQuery, centralRepoDb); queryDomainFrequency(domainsToQuery, centralRepoDb, context);
domainsToQuery.clear(); domainsToQuery.clear();
} }
} }
} }
} }
queryDomainFrequency(domainsToQuery, centralRepoDb); queryDomainFrequency(domainsToQuery, centralRepoDb, context);
computeFrequency(hashesToLookUp, currentFiles, centralRepoDb); computeFrequency(hashesToLookUp, currentFiles, centralRepoDb, context);
} }
} }
@ -487,17 +569,22 @@ public class DiscoveryAttributes {
* Query to get the frequency of a domain. * Query to get the frequency of a domain.
* *
* @param domainsToQuery List of domains to check the frequency of. * @param domainsToQuery List of domains to check the frequency of.
* @param centralRepository The central repository to query. * @param centralRepository The central repository being used to check
* frequency.
* @param context The SearchContext the search which is applying
* this filter is being performed from.
* *
* @throws DiscoveryException * @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/ */
private static void queryDomainFrequency(List<ResultDomain> domainsToQuery, CentralRepository centralRepository) throws DiscoveryException { private static void queryDomainFrequency(List<ResultDomain> domainsToQuery, CentralRepository centralRepository, SearchContext context) throws DiscoveryException, SearchCancellationException {
if (domainsToQuery.isEmpty()) { if (domainsToQuery.isEmpty()) {
return; return;
} }
try { try {
final CorrelationAttributeInstance.Type attributeType = centralRepository.getCorrelationTypeById(CorrelationAttributeInstance.DOMAIN_TYPE_ID); final CorrelationAttributeInstance.Type attributeType = centralRepository.getCorrelationTypeById(CorrelationAttributeInstance.DOMAIN_TYPE_ID);
final Map<String, List<ResultDomain>> resultDomainTable = organizeByValue(domainsToQuery, attributeType); final Map<String, List<ResultDomain>> resultDomainTable = organizeByValue(domainsToQuery, attributeType, context);
final String values = createCSV(resultDomainTable.keySet()); final String values = createCSV(resultDomainTable.keySet());
final String tableName = CentralRepoDbUtil.correlationTypeToInstanceTableName(attributeType); final String tableName = CentralRepoDbUtil.correlationTypeToInstanceTableName(attributeType);
final String domainFrequencyQuery = " value AS domain_name, COUNT(value) AS frequency FROM" final String domainFrequencyQuery = " value AS domain_name, COUNT(value) AS frequency FROM"
@ -508,8 +595,11 @@ public class DiscoveryAttributes {
+ ")) AS foo GROUP BY value"; + ")) AS foo GROUP BY value";
final DomainFrequencyCallback frequencyCallback = new DomainFrequencyCallback(resultDomainTable); final DomainFrequencyCallback frequencyCallback = new DomainFrequencyCallback(resultDomainTable);
centralRepository.processSelectClause(domainFrequencyQuery, frequencyCallback);
centralRepository.processSelectClause(domainFrequencyQuery, frequencyCallback);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Domain frequency was being queried with the CR.");
}
if (frequencyCallback.getCause() != null) { if (frequencyCallback.getCause() != null) {
throw frequencyCallback.getCause(); throw frequencyCallback.getCause();
} }
@ -620,7 +710,7 @@ public class DiscoveryAttributes {
@Override @Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb, public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException { CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Get pairs of (object ID, hash set name) for all files in the list of files that have // Get pairs of (object ID, hash set name) for all files in the list of files that have
// hash set hits. // hash set hits.
@ -628,6 +718,9 @@ public class DiscoveryAttributes {
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID()); BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID());
HashSetNamesCallback callback = new HashSetNamesCallback(results); HashSetNamesCallback callback = new HashSetNamesCallback(results);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Hash Hit attribute was being added.");
}
try { try {
caseDb.getCaseDbAccessManager().select(selectQuery, callback); caseDb.getCaseDbAccessManager().select(selectQuery, callback);
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
@ -695,7 +788,7 @@ public class DiscoveryAttributes {
@Override @Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb, public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException { CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Get pairs of (object ID, interesting item set name) for all files in the list of files that have // Get pairs of (object ID, interesting item set name) for all files in the list of files that have
// interesting file set hits. // interesting file set hits.
@ -703,6 +796,9 @@ public class DiscoveryAttributes {
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID()); BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID());
InterestingFileSetNamesCallback callback = new InterestingFileSetNamesCallback(results); InterestingFileSetNamesCallback callback = new InterestingFileSetNamesCallback(results);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Interesting Item attribute was being added.");
}
try { try {
caseDb.getCaseDbAccessManager().select(selectQuery, callback); caseDb.getCaseDbAccessManager().select(selectQuery, callback);
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
@ -808,7 +904,7 @@ public class DiscoveryAttributes {
@Override @Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb, public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException { CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Get pairs of (object ID, object type name) for all files in the list of files that have // Get pairs of (object ID, object type name) for all files in the list of files that have
// objects detected // objects detected
@ -816,6 +912,9 @@ public class DiscoveryAttributes {
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION.getTypeID()); BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION.getTypeID());
ObjectDetectedNamesCallback callback = new ObjectDetectedNamesCallback(results); ObjectDetectedNamesCallback callback = new ObjectDetectedNamesCallback(results);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Object Detected attribute was being added.");
}
try { try {
caseDb.getCaseDbAccessManager().select(selectQuery, callback); caseDb.getCaseDbAccessManager().select(selectQuery, callback);
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
@ -884,10 +983,13 @@ public class DiscoveryAttributes {
@Override @Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb, public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException { CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
try { try {
for (Result result : results) { for (Result result : results) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while File Tag attribute was being added.");
}
if (result.getType() == SearchData.Type.DOMAIN) { if (result.getType() == SearchData.Type.DOMAIN) {
return; return;
} }
@ -995,14 +1097,20 @@ public class DiscoveryAttributes {
} }
/** /**
*
* Computes the CR frequency of all the given hashes and updates the list of * Computes the CR frequency of all the given hashes and updates the list of
* files. * files.
* *
* @param hashesToLookUp Hashes to find the frequency of. * @param hashesToLookUp Hashes to find the frequency of.
* @param currentFiles List of files to update with frequencies. * @param currentFiles List of files to update with frequencies.
* @param centralRepoDb The central repository being used. * @param centralRepoDb The central repository being used.
* @param context The SearchContext the search which is applying this
* filter is being performed from.
*
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/ */
private static void computeFrequency(Set<String> hashesToLookUp, List<ResultFile> currentFiles, CentralRepository centralRepoDb) { private static void computeFrequency(Set<String> hashesToLookUp, List<ResultFile> currentFiles, CentralRepository centralRepoDb, SearchContext context) throws SearchCancellationException {
if (hashesToLookUp.isEmpty()) { if (hashesToLookUp.isEmpty()) {
return; return;
@ -1022,7 +1130,9 @@ public class DiscoveryAttributes {
FrequencyCallback callback = new FrequencyCallback(currentFiles); FrequencyCallback callback = new FrequencyCallback(currentFiles);
centralRepoDb.processSelectClause(selectClause, callback); centralRepoDb.processSelectClause(selectClause, callback);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Domain frequency was being queried with the CR.");
}
} catch (CentralRepoException ex) { } catch (CentralRepoException ex) {
logger.log(Level.WARNING, "Error getting frequency counts from Central Repository", ex); // NON-NLS logger.log(Level.WARNING, "Error getting frequency counts from Central Repository", ex); // NON-NLS
} }

View File

@ -59,6 +59,7 @@ public class DiscoveryKeyUtils {
private final List<AbstractFilter> filters; private final List<AbstractFilter> filters;
private final SleuthkitCase sleuthkitCase; private final SleuthkitCase sleuthkitCase;
private final CentralRepository centralRepository; private final CentralRepository centralRepository;
private final SearchContext context;
/** /**
* Construct a new SearchKey with all information that defines a search. * Construct a new SearchKey with all information that defines a search.
@ -70,16 +71,20 @@ public class DiscoveryKeyUtils {
* @param sortingMethod The method to sort the results by. * @param sortingMethod The method to sort the results by.
* @param sleuthkitCase The SleuthkitCase being searched. * @param sleuthkitCase The SleuthkitCase being searched.
* @param centralRepository The Central Repository being searched. * @param centralRepository The Central Repository being searched.
* @param context The SearchContext which reflects the search
* being performed to get results for this
* key.
*/ */
SearchKey(String userName, List<AbstractFilter> filters, SearchKey(String userName, List<AbstractFilter> filters,
DiscoveryAttributes.AttributeType groupAttributeType, DiscoveryAttributes.AttributeType groupAttributeType,
Group.GroupSortingAlgorithm groupSortingType, Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod sortingMethod, ResultsSorter.SortingMethod sortingMethod,
SleuthkitCase sleuthkitCase, CentralRepository centralRepository) { SleuthkitCase sleuthkitCase, CentralRepository centralRepository, SearchContext context) {
this.groupAttributeType = groupAttributeType; this.groupAttributeType = groupAttributeType;
this.groupSortingType = groupSortingType; this.groupSortingType = groupSortingType;
this.sortingMethod = sortingMethod; this.sortingMethod = sortingMethod;
this.filters = filters; this.filters = filters;
this.context = context;
StringBuilder searchStringBuilder = new StringBuilder(); StringBuilder searchStringBuilder = new StringBuilder();
searchStringBuilder.append(userName); searchStringBuilder.append(userName);
@ -93,8 +98,8 @@ public class DiscoveryKeyUtils {
} }
/** /**
* Construct a SearchKey without a SleuthkitCase or CentralRepositry * Construct a SearchKey without a SearchContext, SleuthkitCase or
* instance. * CentralRepositry instance.
* *
* @param userName The name of the user performing the search. * @param userName The name of the user performing the search.
* @param filters The Filters being used for the search. * @param filters The Filters being used for the search.
@ -107,7 +112,8 @@ public class DiscoveryKeyUtils {
Group.GroupSortingAlgorithm groupSortingType, Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod sortingMethod) { ResultsSorter.SortingMethod sortingMethod) {
this(userName, filters, groupAttributeType, groupSortingType, this(userName, filters, groupAttributeType, groupSortingType,
sortingMethod, null, null); sortingMethod, null, null, null);
//this constructor should only be used putting things directly into a map or getting if present since casedb, cr, and search context will be null
} }
@Override @Override
@ -141,6 +147,23 @@ public class DiscoveryKeyUtils {
return hash; return hash;
} }
/**
* Get the SearchContext for the search this key is being used in.
*
* @return The SearchContext the search key is being used in.
*
* @throws DiscoveryException Thrown when the key being used has a null
* context indicating it was not created with
* knowledge of the case or central
* repository databases.
*/
SearchContext getContext() throws DiscoveryException {
if (context == null) {
throw new DiscoveryException("The key in use was created without a context and does not support retrieving information from the databases.");
}
return context;
}
/** /**
* Get the String representation of this key. * Get the String representation of this key.
* *

View File

@ -78,24 +78,31 @@ public class DomainSearch {
* @param caseDb The case database. * @param caseDb The case database.
* @param centralRepoDb The central repository database. Can be null * @param centralRepoDb The central repository database. Can be null
* if not needed. * if not needed.
* @param context The SearchContext the search is being performed from.
* *
* @return A LinkedHashMap grouped and sorted according to the parameters. * @return A LinkedHashMap grouped and sorted according to the parameters.
* *
* @throws DiscoveryException * @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/ */
public Map<GroupKey, Integer> getGroupSizes(String userName, public Map<GroupKey, Integer> getGroupSizes(String userName,
List<AbstractFilter> filters, List<AbstractFilter> filters,
DiscoveryAttributes.AttributeType groupAttributeType, DiscoveryAttributes.AttributeType groupAttributeType,
Group.GroupSortingAlgorithm groupSortingType, Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod domainSortingMethod, ResultsSorter.SortingMethod domainSortingMethod,
SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
final Map<GroupKey, List<Result>> searchResults = searchCache.get( final Map<GroupKey, List<Result>> searchResults = searchCache.get(
userName, filters, groupAttributeType, groupSortingType, userName, filters, groupAttributeType, groupSortingType,
domainSortingMethod, caseDb, centralRepoDb); domainSortingMethod, caseDb, centralRepoDb, context);
// Transform the cached results into a map of group key to group size. // Transform the cached results into a map of group key to group size.
final LinkedHashMap<GroupKey, Integer> groupSizes = new LinkedHashMap<>(); final LinkedHashMap<GroupKey, Integer> groupSizes = new LinkedHashMap<>();
for (GroupKey groupKey : searchResults.keySet()) { for (GroupKey groupKey : searchResults.keySet()) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled before group sizes were finished being calculated");
}
groupSizes.put(groupKey, searchResults.get(groupKey).size()); groupSizes.put(groupKey, searchResults.get(groupKey).size());
} }
@ -130,11 +137,11 @@ public class DomainSearch {
Group.GroupSortingAlgorithm groupSortingType, Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod domainSortingMethod, ResultsSorter.SortingMethod domainSortingMethod,
GroupKey groupKey, int startingEntry, int numberOfEntries, GroupKey groupKey, int startingEntry, int numberOfEntries,
SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
final Map<GroupKey, List<Result>> searchResults = searchCache.get( final Map<GroupKey, List<Result>> searchResults = searchCache.get(
userName, filters, groupAttributeType, groupSortingType, userName, filters, groupAttributeType, groupSortingType,
domainSortingMethod, caseDb, centralRepoDb); domainSortingMethod, caseDb, centralRepoDb, context);
final List<Result> domainsInGroup = searchResults.get(groupKey); final List<Result> domainsInGroup = searchResults.get(groupKey);
final List<Result> page = new ArrayList<>(); final List<Result> page = new ArrayList<>();
for (int i = startingEntry; (i < startingEntry + numberOfEntries) for (int i = startingEntry; (i < startingEntry + numberOfEntries)

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2020 Basis Technology Corp. * Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -55,20 +55,24 @@ class DomainSearchCache {
* @param caseDb The case database. * @param caseDb The case database.
* @param centralRepoDb The central repository database. Can be null if * @param centralRepoDb The central repository database. Can be null if
* not needed. * not needed.
* @param context The SearchContext the search is being performed
* from.
* *
* @return Domain search results matching the given parameters. * @return Domain search results matching the given parameters.
* *
* @throws DiscoveryException * @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/ */
Map<GroupKey, List<Result>> get(String userName, Map<GroupKey, List<Result>> get(String userName,
List<AbstractFilter> filters, List<AbstractFilter> filters,
DiscoveryAttributes.AttributeType groupAttributeType, DiscoveryAttributes.AttributeType groupAttributeType,
Group.GroupSortingAlgorithm groupSortingType, Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod domainSortingMethod, ResultsSorter.SortingMethod domainSortingMethod,
SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
try { try {
final SearchKey searchKey = new SearchKey(userName, filters, groupAttributeType, final SearchKey searchKey = new SearchKey(userName, filters, groupAttributeType,
groupSortingType, domainSortingMethod, caseDb, centralRepoDb); groupSortingType, domainSortingMethod, caseDb, centralRepoDb, context);
return cache.get(searchKey); return cache.get(searchKey);
} catch (ExecutionException ex) { } catch (ExecutionException ex) {
throw new DiscoveryException("Error fetching results from cache", ex.getCause()); throw new DiscoveryException("Error fetching results from cache", ex.getCause());

View File

@ -73,7 +73,7 @@ class DomainSearchCacheLoader extends CacheLoader<SearchKey, Map<GroupKey, List<
throw new InterruptedException(); throw new InterruptedException();
} }
attr.addAttributeToResults(domainResults, attr.addAttributeToResults(domainResults,
key.getSleuthkitCase(), key.getCentralRepository()); key.getSleuthkitCase(), key.getCentralRepository(), key.getContext());
} }
// Apply secondary in memory filters // Apply secondary in memory filters
for (AbstractFilter filter : key.getFilters()) { for (AbstractFilter filter : key.getFilters()) {
@ -81,7 +81,7 @@ class DomainSearchCacheLoader extends CacheLoader<SearchKey, Map<GroupKey, List<
throw new InterruptedException(); throw new InterruptedException();
} }
if (filter.useAlternateFilter()) { if (filter.useAlternateFilter()) {
domainResults = filter.applyAlternateFilter(domainResults, key.getSleuthkitCase(), key.getCentralRepository()); domainResults = filter.applyAlternateFilter(domainResults, key.getSleuthkitCase(), key.getCentralRepository(), key.getContext());
} }
} }
// Sort the ResultDomains by the requested criteria. // Sort the ResultDomains by the requested criteria.

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2019-2020 Basis Technology Corp. * Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -62,17 +62,21 @@ public class FileSearch {
* @param caseDb The case database * @param caseDb The case database
* @param centralRepoDb The central repository database. Can be null if * @param centralRepoDb The central repository database. Can be null if
* not needed. * not needed.
* @param context The SearchContext the search is being performed
* from.
* *
* @return The raw search results * @return The raw search results
* *
* @throws DiscoveryException * @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/ */
static SearchResults runFileSearchDebug(String userName, static SearchResults runFileSearchDebug(String userName,
List<AbstractFilter> filters, List<AbstractFilter> filters,
AttributeType groupAttributeType, AttributeType groupAttributeType,
Group.GroupSortingAlgorithm groupSortingType, Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod fileSortingMethod, ResultsSorter.SortingMethod fileSortingMethod,
SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Make a list of attributes that we want to add values for. This ensures the // Make a list of attributes that we want to add values for. This ensures the
// ResultFile objects will have all needed fields set when it's time to group // ResultFile objects will have all needed fields set when it's time to group
// and sort them. For example, if we're grouping by central repo frequency, we need // and sort them. For example, if we're grouping by central repo frequency, we need
@ -82,10 +86,10 @@ public class FileSearch {
attributesNeededForGroupingOrSorting.addAll(fileSortingMethod.getRequiredAttributes()); attributesNeededForGroupingOrSorting.addAll(fileSortingMethod.getRequiredAttributes());
// Run the queries for each filter // Run the queries for each filter
List<Result> results = SearchFiltering.runQueries(filters, caseDb, centralRepoDb); List<Result> results = SearchFiltering.runQueries(filters, caseDb, centralRepoDb, context);
// Add the data to resultFiles for any attributes needed for sorting and grouping // Add the data to resultFiles for any attributes needed for sorting and grouping
addAttributes(attributesNeededForGroupingOrSorting, results, caseDb, centralRepoDb); addAttributes(attributesNeededForGroupingOrSorting, results, caseDb, centralRepoDb, context);
// Collect everything in the search results // Collect everything in the search results
SearchResults searchResults = new SearchResults(groupSortingType, groupAttributeType, fileSortingMethod); SearchResults searchResults = new SearchResults(groupSortingType, groupAttributeType, fileSortingMethod);
@ -114,21 +118,28 @@ public class FileSearch {
* @param caseDb The case database * @param caseDb The case database
* @param centralRepoDb The central repository database. Can be null if * @param centralRepoDb The central repository database. Can be null if
* not needed. * not needed.
* @param context The SearchContext the search is being performed
* from.
* *
* @return A LinkedHashMap grouped and sorted according to the parameters * @return A LinkedHashMap grouped and sorted according to the parameters
* *
* @throws DiscoveryException * @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/ */
public static Map<GroupKey, Integer> getGroupSizes(String userName, public static Map<GroupKey, Integer> getGroupSizes(String userName,
List<AbstractFilter> filters, List<AbstractFilter> filters,
AttributeType groupAttributeType, AttributeType groupAttributeType,
Group.GroupSortingAlgorithm groupSortingType, Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod fileSortingMethod, ResultsSorter.SortingMethod fileSortingMethod,
SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
Map<GroupKey, List<Result>> searchResults = runFileSearch(userName, filters, Map<GroupKey, List<Result>> searchResults = runFileSearch(userName, filters,
groupAttributeType, groupSortingType, fileSortingMethod, caseDb, centralRepoDb); groupAttributeType, groupSortingType, fileSortingMethod, caseDb, centralRepoDb, context);
LinkedHashMap<GroupKey, Integer> groupSizes = new LinkedHashMap<>(); LinkedHashMap<GroupKey, Integer> groupSizes = new LinkedHashMap<>();
for (GroupKey groupKey : searchResults.keySet()) { for (GroupKey groupKey : searchResults.keySet()) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled before group sizes were finished being calculated");
}
groupSizes.put(groupKey, searchResults.get(groupKey).size()); groupSizes.put(groupKey, searchResults.get(groupKey).size());
} }
return groupSizes; return groupSizes;
@ -151,10 +162,14 @@ public class FileSearch {
* @param caseDb The case database * @param caseDb The case database
* @param centralRepoDb The central repository database. Can be null if * @param centralRepoDb The central repository database. Can be null if
* not needed. * not needed.
* @param context The SearchContext the search is being performed
* from.
* *
* @return A LinkedHashMap grouped and sorted according to the parameters * @return A LinkedHashMap grouped and sorted according to the parameters
* *
* @throws DiscoveryException * @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/ */
public static List<Result> getFilesInGroup(String userName, public static List<Result> getFilesInGroup(String userName,
List<AbstractFilter> filters, List<AbstractFilter> filters,
@ -164,7 +179,7 @@ public class FileSearch {
GroupKey groupKey, GroupKey groupKey,
int startingEntry, int startingEntry,
int numberOfEntries, int numberOfEntries,
SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
//the group should be in the cache at this point //the group should be in the cache at this point
List<Result> filesInGroup = null; List<Result> filesInGroup = null;
SearchKey searchKey = new SearchKey(userName, filters, groupAttributeType, groupSortingType, fileSortingMethod); SearchKey searchKey = new SearchKey(userName, filters, groupAttributeType, groupSortingType, fileSortingMethod);
@ -178,7 +193,7 @@ public class FileSearch {
List<Result> page = new ArrayList<>(); List<Result> page = new ArrayList<>();
if (filesInGroup == null) { if (filesInGroup == null) {
logger.log(Level.INFO, "Group {0} was not cached, performing search to cache all groups again", groupKey); logger.log(Level.INFO, "Group {0} was not cached, performing search to cache all groups again", groupKey);
runFileSearch(userName, filters, groupAttributeType, groupSortingType, fileSortingMethod, caseDb, centralRepoDb); runFileSearch(userName, filters, groupAttributeType, groupSortingType, fileSortingMethod, caseDb, centralRepoDb, context);
synchronized (searchCache) { synchronized (searchCache) {
resultsMap = searchCache.getIfPresent(searchKey.getKeyString()); resultsMap = searchCache.getIfPresent(searchKey.getKeyString());
} }
@ -218,7 +233,6 @@ public class FileSearch {
TextSummarizer localSummarizer; TextSummarizer localSummarizer;
synchronized (searchCache) { synchronized (searchCache) {
localSummarizer = SummaryHelpers.getLocalSummarizer(); localSummarizer = SummaryHelpers.getLocalSummarizer();
} }
if (localSummarizer != null) { if (localSummarizer != null) {
try { try {
@ -247,17 +261,21 @@ public class FileSearch {
* @param caseDb The case database * @param caseDb The case database
* @param centralRepoDb The central repository database. Can be null if * @param centralRepoDb The central repository database. Can be null if
* not needed. * not needed.
* @param context The SearchContext the search is being performed
* from.
* *
* @return A LinkedHashMap grouped and sorted according to the parameters * @return A LinkedHashMap grouped and sorted according to the parameters
* *
* @throws DiscoveryException * @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/ */
private static Map<GroupKey, List<Result>> runFileSearch(String userName, public static Map<GroupKey, List<Result>> runFileSearch(String userName,
List<AbstractFilter> filters, List<AbstractFilter> filters,
AttributeType groupAttributeType, AttributeType groupAttributeType,
Group.GroupSortingAlgorithm groupSortingType, Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod fileSortingMethod, ResultsSorter.SortingMethod fileSortingMethod,
SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Make a list of attributes that we want to add values for. This ensures the // Make a list of attributes that we want to add values for. This ensures the
// ResultFile objects will have all needed fields set when it's time to group // ResultFile objects will have all needed fields set when it's time to group
@ -268,10 +286,10 @@ public class FileSearch {
attributesNeededForGroupingOrSorting.addAll(fileSortingMethod.getRequiredAttributes()); attributesNeededForGroupingOrSorting.addAll(fileSortingMethod.getRequiredAttributes());
// Run the queries for each filter // Run the queries for each filter
List<Result> results = SearchFiltering.runQueries(filters, caseDb, centralRepoDb); List<Result> results = SearchFiltering.runQueries(filters, caseDb, centralRepoDb, context);
// Add the data to resultFiles for any attributes needed for sorting and grouping // Add the data to resultFiles for any attributes needed for sorting and grouping
addAttributes(attributesNeededForGroupingOrSorting, results, caseDb, centralRepoDb); addAttributes(attributesNeededForGroupingOrSorting, results, caseDb, centralRepoDb, context);
// Collect everything in the search results // Collect everything in the search results
SearchResults searchResults = new SearchResults(groupSortingType, groupAttributeType, fileSortingMethod); SearchResults searchResults = new SearchResults(groupSortingType, groupAttributeType, fileSortingMethod);
@ -295,13 +313,17 @@ public class FileSearch {
* @param caseDb The case database * @param caseDb The case database
* @param centralRepoDb The central repository database. Can be null if not * @param centralRepoDb The central repository database. Can be null if not
* needed. * needed.
* @param context The SearchContext the search is being performed
* from.
* *
* @throws DiscoveryException * @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/ */
private static void addAttributes(List<AttributeType> attrs, List<Result> results, SleuthkitCase caseDb, CentralRepository centralRepoDb) private static void addAttributes(List<AttributeType> attrs, List<Result> results, SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context)
throws DiscoveryException { throws DiscoveryException, SearchCancellationException {
for (AttributeType attr : attrs) { for (AttributeType attr : attrs) {
attr.addAttributeToResults(results, caseDb, centralRepoDb); attr.addAttributeToResults(results, caseDb, centralRepoDb, context);
} }
} }

View File

@ -0,0 +1,40 @@
/*
* Autopsy
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.discovery.search;
import java.util.concurrent.CancellationException;
/**
 * Exception thrown when a discovery search has been deliberately cancelled,
 * carrying a message that records where in the code the cancellation was
 * detected.
 */
public class SearchCancellationException extends CancellationException {

    private static final long serialVersionUID = 1L;

    /**
     * Create a SearchCancellationException carrying the given detail message.
     *
     * @param message The text to use as the message for the exception.
     */
    SearchCancellationException(String message) {
        super(message);
    }
}

View File

@ -0,0 +1,33 @@
/*
* Autopsy
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.discovery.search;
/**
 * Interface for providing feedback on whether a search has been cancelled.
 *
 * Implementations are polled periodically by long-running search code so the
 * search can stop promptly after a user cancellation.
 */
@FunctionalInterface
public interface SearchContext {

    /**
     * Returns true if the search has been cancelled, false otherwise.
     *
     * @return True if the search has been cancelled, false otherwise.
     */
    boolean searchIsCancelled();
}

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2019-2020 Basis Technology Corp. * Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -60,10 +60,16 @@ public class SearchFiltering {
* @param caseDb The case database. * @param caseDb The case database.
* @param centralRepoDb The central repo. Can be null as long as no filters * @param centralRepoDb The central repo. Can be null as long as no filters
* need it. * need it.
* @param context The SearchContext the search is being performed
* from.
* *
* @return List of Results from the search performed. * @return List of Results from the search performed.
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/ */
static List<Result> runQueries(List<AbstractFilter> filters, SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { static List<Result> runQueries(List<AbstractFilter> filters, SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
if (caseDb == null) { if (caseDb == null) {
throw new DiscoveryException("Case DB parameter is null"); // NON-NLS throw new DiscoveryException("Case DB parameter is null"); // NON-NLS
} }
@ -82,8 +88,11 @@ public class SearchFiltering {
// The file search filter is required, so this should never be empty. // The file search filter is required, so this should never be empty.
throw new DiscoveryException("Selected filters do not include a case database query"); throw new DiscoveryException("Selected filters do not include a case database query");
} }
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled before result list could be retrieved.");
}
try { try {
return getResultList(filters, combinedQuery, caseDb, centralRepoDb); return getResultList(filters, combinedQuery, caseDb, centralRepoDb, context);
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
throw new DiscoveryException("Error querying case database", ex); // NON-NLS throw new DiscoveryException("Error querying case database", ex); // NON-NLS
} }
@ -97,17 +106,23 @@ public class SearchFiltering {
* @param caseDb The case database. * @param caseDb The case database.
* @param centralRepoDb The central repo. Can be null as long as no filters * @param centralRepoDb The central repo. Can be null as long as no filters
* need it. * need it.
* @param context The SearchContext the search is being performed
* from.
* *
* @return An ArrayList of Results returned by the query. * @return An ArrayList of Results returned by the query.
* *
* @throws TskCoreException * @throws TskCoreException
* @throws DiscoveryException * @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/ */
private static List<Result> getResultList(List<AbstractFilter> filters, String combinedQuery, SleuthkitCase caseDb, CentralRepository centralRepoDb) throws TskCoreException, DiscoveryException { private static List<Result> getResultList(List<AbstractFilter> filters, String combinedQuery, SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws TskCoreException, DiscoveryException, SearchCancellationException {
// Get all matching abstract files // Get all matching abstract files
List<Result> resultList = new ArrayList<>(); List<Result> resultList = new ArrayList<>();
List<AbstractFile> sqlResults = caseDb.findAllFilesWhere(combinedQuery); List<AbstractFile> sqlResults = caseDb.findAllFilesWhere(combinedQuery);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while the case database query was being performed.");
}
// If there are no results, return now // If there are no results, return now
if (sqlResults.isEmpty()) { if (sqlResults.isEmpty()) {
return resultList; return resultList;
@ -120,8 +135,11 @@ public class SearchFiltering {
// Now run any non-SQL filters. // Now run any non-SQL filters.
for (AbstractFilter filter : filters) { for (AbstractFilter filter : filters) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while alternate filters were being applied.");
}
if (filter.useAlternateFilter()) { if (filter.useAlternateFilter()) {
resultList = filter.applyAlternateFilter(resultList, caseDb, centralRepoDb); resultList = filter.applyAlternateFilter(resultList, caseDb, centralRepoDb, context);
} }
// There are no matches for the filters run so far, so return // There are no matches for the filters run so far, so return
if (resultList.isEmpty()) { if (resultList.isEmpty()) {
@ -227,7 +245,7 @@ public class SearchFiltering {
public Collection<ARTIFACT_TYPE> getTypes() { public Collection<ARTIFACT_TYPE> getTypes() {
return Collections.unmodifiableCollection(types); return Collections.unmodifiableCollection(types);
} }
private StringJoiner joinStandardArtifactTypes() { private StringJoiner joinStandardArtifactTypes() {
StringJoiner joiner = new StringJoiner(","); StringJoiner joiner = new StringJoiner(",");
for (ARTIFACT_TYPE type : types) { for (ARTIFACT_TYPE type : types) {
@ -241,9 +259,10 @@ public class SearchFiltering {
StringJoiner joiner = joinStandardArtifactTypes(); StringJoiner joiner = joinStandardArtifactTypes();
return "artifact_type_id IN (" + joiner + ")"; return "artifact_type_id IN (" + joiner + ")";
} }
/** /**
* Used by backend domain search code to query for additional artifact types. * Used by backend domain search code to query for additional artifact
* types.
*/ */
String getWhereClause(List<ARTIFACT_TYPE> nonVisibleArtifactTypesToInclude) { String getWhereClause(List<ARTIFACT_TYPE> nonVisibleArtifactTypesToInclude) {
StringJoiner joiner = joinStandardArtifactTypes(); StringJoiner joiner = joinStandardArtifactTypes();
@ -674,14 +693,17 @@ public class SearchFiltering {
@Override @Override
public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb, public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException { CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Set the frequency for each file // Set the frequency for each file
DiscoveryAttributes.FrequencyAttribute freqAttr = new DiscoveryAttributes.FrequencyAttribute(); DiscoveryAttributes.FrequencyAttribute freqAttr = new DiscoveryAttributes.FrequencyAttribute();
freqAttr.addAttributeToResults(currentResults, caseDb, centralRepoDb); freqAttr.addAttributeToResults(currentResults, caseDb, centralRepoDb, context);
// If the frequency matches the filter, add the file to the results // If the frequency matches the filter, add the file to the results
List<Result> frequencyResults = new ArrayList<>(); List<Result> frequencyResults = new ArrayList<>();
for (Result file : currentResults) { for (Result file : currentResults) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Frequency alternate filter was being applied.");
}
if (frequencies.contains(file.getFrequency())) { if (frequencies.contains(file.getFrequency())) {
frequencyResults.add(file); frequencyResults.add(file);
} }
@ -705,7 +727,7 @@ public class SearchFiltering {
return Bundle.SearchFiltering_FrequencyFilter_desc(desc); return Bundle.SearchFiltering_FrequencyFilter_desc(desc);
} }
} }
/** /**
* A filter for domains with known account types. * A filter for domains with known account types.
*/ */
@ -715,17 +737,20 @@ public class SearchFiltering {
public String getWhereClause() { public String getWhereClause() {
throw new UnsupportedOperationException("Not supported, this is an alternative filter."); throw new UnsupportedOperationException("Not supported, this is an alternative filter.");
} }
@Override @Override
public boolean useAlternateFilter() { public boolean useAlternateFilter() {
return true; return true;
} }
@Override @Override
public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb, public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException { CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
List<Result> filteredResults = new ArrayList<>(); List<Result> filteredResults = new ArrayList<>();
for (Result result : currentResults) { for (Result result : currentResults) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Known Account Type alternate filter was being applied.");
}
if (result instanceof ResultDomain) { if (result instanceof ResultDomain) {
ResultDomain domain = (ResultDomain) result; ResultDomain domain = (ResultDomain) result;
if (domain.hasKnownAccountType()) { if (domain.hasKnownAccountType()) {
@ -745,9 +770,9 @@ public class SearchFiltering {
public String getDesc() { public String getDesc() {
return Bundle.SearchFiltering_KnownAccountTypeFilter_desc(); return Bundle.SearchFiltering_KnownAccountTypeFilter_desc();
} }
} }
/** /**
* A filter for previously notable content in the central repository. * A filter for previously notable content in the central repository.
*/ */
@ -757,19 +782,22 @@ public class SearchFiltering {
public String getWhereClause() { public String getWhereClause() {
throw new UnsupportedOperationException("Not supported, this is an alternative filter."); throw new UnsupportedOperationException("Not supported, this is an alternative filter.");
} }
@Override @Override
public boolean useAlternateFilter() { public boolean useAlternateFilter() {
return true; return true;
} }
@Override @Override
public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb, public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException { CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
DiscoveryAttributes.PreviouslyNotableAttribute previouslyNotableAttr = new DiscoveryAttributes.PreviouslyNotableAttribute(); DiscoveryAttributes.PreviouslyNotableAttribute previouslyNotableAttr = new DiscoveryAttributes.PreviouslyNotableAttribute();
previouslyNotableAttr.addAttributeToResults(currentResults, caseDb, centralRepoDb); previouslyNotableAttr.addAttributeToResults(currentResults, caseDb, centralRepoDb, context);
List<Result> filteredResults = new ArrayList<>(); List<Result> filteredResults = new ArrayList<>();
for (Result file : currentResults) { for (Result file : currentResults) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Previously Notable alternate filter was being applied.");
}
if (file.getPreviouslyNotableInCR() == SearchData.PreviouslyNotable.PREVIOUSLY_NOTABLE) { if (file.getPreviouslyNotableInCR() == SearchData.PreviouslyNotable.PREVIOUSLY_NOTABLE) {
filteredResults.add(file); filteredResults.add(file);
} }
@ -784,7 +812,7 @@ public class SearchFiltering {
public String getDesc() { public String getDesc() {
return Bundle.SearchFiltering_PreviouslyNotableFilter_desc(); return Bundle.SearchFiltering_PreviouslyNotableFilter_desc();
} }
} }
/** /**
@ -1068,7 +1096,7 @@ public class SearchFiltering {
@Override @Override
public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb, public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException { CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
if (centralRepoDb == null) { if (centralRepoDb == null) {
throw new DiscoveryException("Can not run Previously Notable filter with null Central Repository DB"); // NON-NLS throw new DiscoveryException("Can not run Previously Notable filter with null Central Repository DB"); // NON-NLS
@ -1087,6 +1115,9 @@ public class SearchFiltering {
CorrelationAttributeInstance.Type type = CorrelationAttributeInstance.getDefaultCorrelationTypes().get(CorrelationAttributeInstance.FILES_TYPE_ID); CorrelationAttributeInstance.Type type = CorrelationAttributeInstance.getDefaultCorrelationTypes().get(CorrelationAttributeInstance.FILES_TYPE_ID);
for (Result result : currentResults) { for (Result result : currentResults) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Notable alternate filter was being applied.");
}
ResultFile file = (ResultFile) result; ResultFile file = (ResultFile) result;
if (result.getType() == SearchData.Type.DOMAIN) { if (result.getType() == SearchData.Type.DOMAIN) {
break; break;

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy * Autopsy
* *
* Copyright 2020 Basis Technology Corp. * Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -31,7 +31,6 @@ import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import java.util.logging.Level; import java.util.logging.Level;
import javax.swing.SwingUtilities;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.openide.util.NbBundle.Messages; import org.openide.util.NbBundle.Messages;
import org.openide.windows.WindowManager; import org.openide.windows.WindowManager;
@ -574,7 +573,7 @@ final class DiscoveryDialog extends javax.swing.JDialog {
} }
private void searchButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_searchButtonActionPerformed private void searchButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_searchButtonActionPerformed
// Get the selected filters setVisible(false); //set visible used here instead of dispose incase dispose code changes
final DiscoveryTopComponent tc = DiscoveryTopComponent.getTopComponent(); final DiscoveryTopComponent tc = DiscoveryTopComponent.getTopComponent();
if (tc == null) { if (tc == null) {
setValid("No Top Component Found"); setValid("No Top Component Found");
@ -584,6 +583,7 @@ final class DiscoveryDialog extends javax.swing.JDialog {
tc.open(); tc.open();
} }
tc.resetTopComponent(); tc.resetTopComponent();
// Get the selected filters
List<AbstractFilter> filters; List<AbstractFilter> filters;
if (videosButton.isSelected()) { if (videosButton.isSelected()) {
filters = videoFilterPanel.getFilters(); filters = videoFilterPanel.getFilters();
@ -617,7 +617,6 @@ final class DiscoveryDialog extends javax.swing.JDialog {
} }
searchWorker = new SearchWorker(centralRepoDb, type, filters, groupingAttr, groupSortAlgorithm, fileSort); searchWorker = new SearchWorker(centralRepoDb, type, filters, groupingAttr, groupSortAlgorithm, fileSort);
searchWorker.execute(); searchWorker.execute();
dispose();
tc.toFront(); tc.toFront();
tc.requestActive(); tc.requestActive();
}//GEN-LAST:event_searchButtonActionPerformed }//GEN-LAST:event_searchButtonActionPerformed
@ -651,6 +650,7 @@ final class DiscoveryDialog extends javax.swing.JDialog {
void cancelSearch() { void cancelSearch() {
if (searchWorker != null) { if (searchWorker != null) {
searchWorker.cancel(true); searchWorker.cancel(true);
searchWorker = null;
} }
} }
@ -750,7 +750,6 @@ final class DiscoveryDialog extends javax.swing.JDialog {
|| eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT.getTypeID()) { || eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT.getTypeID()) {
shouldUpdate = shouldUpdateFilters(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(), eventData, interestingItems); shouldUpdate = shouldUpdateFilters(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(), eventData, interestingItems);
} }
} }
} catch (NoCurrentCaseException notUsed) { } catch (NoCurrentCaseException notUsed) {
// Case is closed, do nothing. // Case is closed, do nothing.

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy * Autopsy
* *
* Copyright 2019-2020 Basis Technology Corp. * Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -260,7 +260,6 @@ public final class DiscoveryTopComponent extends TopComponent {
private void newSearchButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_newSearchButtonActionPerformed private void newSearchButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_newSearchButtonActionPerformed
close(); close();
final DiscoveryDialog discDialog = DiscoveryDialog.getDiscoveryDialogInstance(); final DiscoveryDialog discDialog = DiscoveryDialog.getDiscoveryDialogInstance();
discDialog.cancelSearch();
discDialog.setVisible(true); discDialog.setVisible(true);
discDialog.validateDialog(); discDialog.validateDialog();
}//GEN-LAST:event_newSearchButtonActionPerformed }//GEN-LAST:event_newSearchButtonActionPerformed

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy * Autopsy
* *
* Copyright 2019 Basis Technology Corp. * Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -36,6 +36,8 @@ import org.sleuthkit.autopsy.discovery.search.DiscoveryException;
import org.sleuthkit.autopsy.discovery.search.DomainSearch; import org.sleuthkit.autopsy.discovery.search.DomainSearch;
import org.sleuthkit.autopsy.discovery.search.ResultsSorter; import org.sleuthkit.autopsy.discovery.search.ResultsSorter;
import org.sleuthkit.autopsy.discovery.search.Result; import org.sleuthkit.autopsy.discovery.search.Result;
import org.sleuthkit.autopsy.discovery.search.SearchCancellationException;
import org.sleuthkit.autopsy.discovery.search.SearchContext;
/** /**
* SwingWorker to retrieve the contents of a page. * SwingWorker to retrieve the contents of a page.
@ -87,7 +89,7 @@ final class PageWorker extends SwingWorker<Void, Void> {
@Override @Override
protected Void doInBackground() throws Exception { protected Void doInBackground() throws Exception {
SearchContext context = new SwingWorkerSearchContext(this);
try { try {
// Run the search // Run the search
if (resultType == SearchData.Type.DOMAIN) { if (resultType == SearchData.Type.DOMAIN) {
@ -96,17 +98,22 @@ final class PageWorker extends SwingWorker<Void, Void> {
groupingAttribute, groupingAttribute,
groupSort, groupSort,
fileSortMethod, groupKey, startingEntry, pageSize, fileSortMethod, groupKey, startingEntry, pageSize,
Case.getCurrentCase().getSleuthkitCase(), centralRepo)); Case.getCurrentCase().getSleuthkitCase(), centralRepo, context));
} else { } else {
results.addAll(FileSearch.getFilesInGroup(System.getProperty(USER_NAME_PROPERTY), searchfilters, results.addAll(FileSearch.getFilesInGroup(System.getProperty(USER_NAME_PROPERTY), searchfilters,
groupingAttribute, groupingAttribute,
groupSort, groupSort,
fileSortMethod, groupKey, startingEntry, pageSize, fileSortMethod, groupKey, startingEntry, pageSize,
Case.getCurrentCase().getSleuthkitCase(), centralRepo)); Case.getCurrentCase().getSleuthkitCase(), centralRepo, context));
} }
} catch (DiscoveryException ex) { } catch (DiscoveryException ex) {
logger.log(Level.SEVERE, "Error running file search test", ex); logger.log(Level.SEVERE, "Error running file search test", ex);
cancel(true); cancel(true);
} catch (SearchCancellationException ex) {
//The user does not explicitly have a way to cancel the loading of a page
//but they could have cancelled the search during the loading of the first page
//So this may or may not be an issue depending on when this occurred.
logger.log(Level.WARNING, "Search was cancelled while retrieving data for results page with starting entry: " + startingEntry, ex);
} }
return null; return null;
} }

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy * Autopsy
* *
* Copyright 2019 Basis Technology Corp. * Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -35,6 +35,8 @@ import org.sleuthkit.autopsy.discovery.search.FileSearch;
import org.sleuthkit.autopsy.discovery.search.DiscoveryException; import org.sleuthkit.autopsy.discovery.search.DiscoveryException;
import org.sleuthkit.autopsy.discovery.search.DomainSearch; import org.sleuthkit.autopsy.discovery.search.DomainSearch;
import org.sleuthkit.autopsy.discovery.search.ResultsSorter; import org.sleuthkit.autopsy.discovery.search.ResultsSorter;
import org.sleuthkit.autopsy.discovery.search.SearchCancellationException;
import org.sleuthkit.autopsy.discovery.search.SearchContext;
import org.sleuthkit.autopsy.discovery.search.SearchData; import org.sleuthkit.autopsy.discovery.search.SearchData;
/** /**
@ -75,23 +77,28 @@ final class SearchWorker extends SwingWorker<Void, Void> {
protected Void doInBackground() throws Exception { protected Void doInBackground() throws Exception {
try { try {
// Run the search // Run the search
SearchContext context = new SwingWorkerSearchContext(this);
if (searchType == SearchData.Type.DOMAIN) { if (searchType == SearchData.Type.DOMAIN) {
DomainSearch domainSearch = new DomainSearch(); DomainSearch domainSearch = new DomainSearch();
results.putAll(domainSearch.getGroupSizes(System.getProperty(USER_NAME_PROPERTY), filters, results.putAll(domainSearch.getGroupSizes(System.getProperty(USER_NAME_PROPERTY), filters,
groupingAttr, groupingAttr,
groupSortAlgorithm, groupSortAlgorithm,
fileSort, fileSort,
Case.getCurrentCase().getSleuthkitCase(), centralRepoDb)); Case.getCurrentCase().getSleuthkitCase(), centralRepoDb, context));
} else { } else {
results.putAll(FileSearch.getGroupSizes(System.getProperty(USER_NAME_PROPERTY), filters, results.putAll(FileSearch.getGroupSizes(System.getProperty(USER_NAME_PROPERTY), filters,
groupingAttr, groupingAttr,
groupSortAlgorithm, groupSortAlgorithm,
fileSort, fileSort,
Case.getCurrentCase().getSleuthkitCase(), centralRepoDb)); Case.getCurrentCase().getSleuthkitCase(), centralRepoDb, context));
} }
} catch (DiscoveryException ex) { } catch (DiscoveryException ex) {
logger.log(Level.SEVERE, "Error running file search test", ex); logger.log(Level.SEVERE, "Error running file search test.", ex);
cancel(true); cancel(true);
} catch (SearchCancellationException ex) {
//search cancellation exceptions should indicate that the user chose to cancell this search
//so would not be a problem but we might be curious what was being done when it was cancelled
logger.log(Level.INFO, "Discovery search was cancelled.", ex);
} }
return null; return null;
} }

View File

@ -0,0 +1,45 @@
/*
* Autopsy
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.discovery.ui;
import javax.swing.SwingWorker;
import org.sleuthkit.autopsy.discovery.search.SearchContext;
/**
* Implementation of SearchContext for searches being performed in the
* background thread of a SwingWorker.
*/
class SwingWorkerSearchContext implements SearchContext {
private final SwingWorker<Void, Void> searchWorker;
/**
* Construct a new SwingWorkerSearchContext.
*
* @param worker The SwingWorker the search is being performed in.
*/
SwingWorkerSearchContext(SwingWorker<Void, Void> worker) {
searchWorker = worker;
}
@Override
public boolean searchIsCancelled() {
return searchWorker.isCancelled();
}
}

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2020 Basis Technology Corp. * Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -52,7 +52,7 @@ public class DomainSearchCacheLoaderTest {
new DiscoveryAttributes.DataSourceAttribute(), new DiscoveryAttributes.DataSourceAttribute(),
Group.GroupSortingAlgorithm.BY_GROUP_NAME, Group.GroupSortingAlgorithm.BY_GROUP_NAME,
ResultsSorter.SortingMethod.BY_DOMAIN_NAME, ResultsSorter.SortingMethod.BY_DOMAIN_NAME,
caseDb, null); caseDb, null, new TestSearchContextImpl(false));
DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class); DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class);
when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains); when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains);
@ -88,7 +88,7 @@ public class DomainSearchCacheLoaderTest {
new DiscoveryAttributes.NoGroupingAttribute(), new DiscoveryAttributes.NoGroupingAttribute(),
Group.GroupSortingAlgorithm.BY_GROUP_NAME, Group.GroupSortingAlgorithm.BY_GROUP_NAME,
ResultsSorter.SortingMethod.BY_DOMAIN_NAME, ResultsSorter.SortingMethod.BY_DOMAIN_NAME,
caseDb, null); caseDb, null, new TestSearchContextImpl(false));
DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class); DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class);
when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains); when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains);
@ -121,7 +121,7 @@ public class DomainSearchCacheLoaderTest {
new DiscoveryAttributes.NoGroupingAttribute(), new DiscoveryAttributes.NoGroupingAttribute(),
Group.GroupSortingAlgorithm.BY_GROUP_NAME, Group.GroupSortingAlgorithm.BY_GROUP_NAME,
ResultsSorter.SortingMethod.BY_DATA_SOURCE, ResultsSorter.SortingMethod.BY_DATA_SOURCE,
caseDb, null); caseDb, null, new TestSearchContextImpl(false));
DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class); DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class);
when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains); when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains);
@ -155,7 +155,7 @@ public class DomainSearchCacheLoaderTest {
new DiscoveryAttributes.DataSourceAttribute(), new DiscoveryAttributes.DataSourceAttribute(),
Group.GroupSortingAlgorithm.BY_GROUP_SIZE, Group.GroupSortingAlgorithm.BY_GROUP_SIZE,
ResultsSorter.SortingMethod.BY_DOMAIN_NAME, ResultsSorter.SortingMethod.BY_DOMAIN_NAME,
caseDb, null); caseDb, null, new TestSearchContextImpl(false));
DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class); DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class);
when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains); when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains);
@ -173,4 +173,5 @@ public class DomainSearchCacheLoaderTest {
} }
} }
} }
} }

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2020 Basis Technology Corp. * Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -24,7 +24,6 @@ import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import org.junit.Test; import org.junit.Test;
import static org.mockito.Mockito.*; import static org.mockito.Mockito.*;
import static org.junit.Assert.*; import static org.junit.Assert.*;
import org.sleuthkit.autopsy.discovery.search.DiscoveryKeyUtils.GroupKey; import org.sleuthkit.autopsy.discovery.search.DiscoveryKeyUtils.GroupKey;
@ -46,11 +45,11 @@ public class DomainSearchTest {
); );
} }
}; };
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null); DomainSearch domainSearch = new DomainSearch(cache, null, null);
Map<GroupKey, Integer> sizes = domainSearch.getGroupSizes(null, Map<GroupKey, Integer> sizes = domainSearch.getGroupSizes(null,
new ArrayList<>(), null, null, null, null, null); new ArrayList<>(), null, null, null, null, null, new TestSearchContextImpl(false));
assertEquals(4, sizes.get(groupOne).longValue()); assertEquals(4, sizes.get(groupOne).longValue());
} }
@ -81,11 +80,11 @@ public class DomainSearchTest {
} }
}; };
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null); DomainSearch domainSearch = new DomainSearch(cache, null, null);
Map<GroupKey, Integer> sizes = domainSearch.getGroupSizes(null, Map<GroupKey, Integer> sizes = domainSearch.getGroupSizes(null,
new ArrayList<>(), null, null, null, null, null); new ArrayList<>(), null, null, null, null, null, new TestSearchContextImpl(false));
assertEquals(4, sizes.get(groupOne).longValue()); assertEquals(4, sizes.get(groupOne).longValue());
assertEquals(3, sizes.get(groupTwo).longValue()); assertEquals(3, sizes.get(groupTwo).longValue());
assertEquals(1, sizes.get(groupThree).longValue()); assertEquals(1, sizes.get(groupThree).longValue());
@ -95,11 +94,11 @@ public class DomainSearchTest {
public void groupSizes_EmptyGroup_ShouldBeSizeZero() throws DiscoveryException { public void groupSizes_EmptyGroup_ShouldBeSizeZero() throws DiscoveryException {
DomainSearchCache cache = mock(DomainSearchCache.class); DomainSearchCache cache = mock(DomainSearchCache.class);
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(new HashMap<>()); when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(new HashMap<>());
DomainSearch domainSearch = new DomainSearch(cache, null, null); DomainSearch domainSearch = new DomainSearch(cache, null, null);
Map<GroupKey, Integer> sizes = domainSearch.getGroupSizes(null, Map<GroupKey, Integer> sizes = domainSearch.getGroupSizes(null,
new ArrayList<>(), null, null, null, null, null); new ArrayList<>(), null, null, null, null, null, new TestSearchContextImpl(false));
assertEquals(0, sizes.size()); assertEquals(0, sizes.size());
} }
@ -120,17 +119,17 @@ public class DomainSearchTest {
} }
}; };
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null); DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null, List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 3, null, null); new ArrayList<>(), null, null, null, groupOne, 0, 3, null, null, new TestSearchContextImpl(false));
assertEquals(3, firstPage.size()); assertEquals(3, firstPage.size());
for (int i = 0; i < firstPage.size(); i++) { for (int i = 0; i < firstPage.size(); i++) {
assertEquals(domains.get(i), firstPage.get(i)); assertEquals(domains.get(i), firstPage.get(i));
} }
} }
@Test @Test
public void getDomains_SingleGroupOverSizedPage_ShouldContainAllDomains() throws DiscoveryException { public void getDomains_SingleGroupOverSizedPage_ShouldContainAllDomains() throws DiscoveryException {
DomainSearchCache cache = mock(DomainSearchCache.class); DomainSearchCache cache = mock(DomainSearchCache.class);
@ -148,17 +147,17 @@ public class DomainSearchTest {
} }
}; };
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null); DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null, List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 100, null, null); new ArrayList<>(), null, null, null, groupOne, 0, 100, null, null, new TestSearchContextImpl(false));
assertEquals(4, firstPage.size()); assertEquals(4, firstPage.size());
for (int i = 0; i < firstPage.size(); i++) { for (int i = 0; i < firstPage.size(); i++) {
assertEquals(domains.get(i), firstPage.get(i)); assertEquals(domains.get(i), firstPage.get(i));
} }
} }
@Test @Test
public void getDomains_SingleGroupHalfPage_ShouldContainHalfDomains() throws DiscoveryException { public void getDomains_SingleGroupHalfPage_ShouldContainHalfDomains() throws DiscoveryException {
DomainSearchCache cache = mock(DomainSearchCache.class); DomainSearchCache cache = mock(DomainSearchCache.class);
@ -176,18 +175,18 @@ public class DomainSearchTest {
} }
}; };
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null); DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null, List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 2, null, null); new ArrayList<>(), null, null, null, groupOne, 0, 2, null, null, new TestSearchContextImpl(false));
assertEquals(2, firstPage.size()); assertEquals(2, firstPage.size());
for (int i = 0; i < firstPage.size(); i++) { for (int i = 0; i < firstPage.size(); i++) {
assertEquals(domains.get(i), firstPage.get(i)); assertEquals(domains.get(i), firstPage.get(i));
} }
} }
@Test @Test
public void getDomains_SingleGroupLastPageLastDomain_ShouldContainLastDomain() throws DiscoveryException { public void getDomains_SingleGroupLastPageLastDomain_ShouldContainLastDomain() throws DiscoveryException {
DomainSearchCache cache = mock(DomainSearchCache.class); DomainSearchCache cache = mock(DomainSearchCache.class);
@ -204,15 +203,15 @@ public class DomainSearchTest {
} }
}; };
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null); DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null, List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 3, 1, null, null); new ArrayList<>(), null, null, null, groupOne, 3, 1, null, null, new TestSearchContextImpl(false));
assertEquals(1, firstPage.size()); assertEquals(1, firstPage.size());
assertEquals(domains.get(domains.size() - 1), firstPage.get(0)); assertEquals(domains.get(domains.size() - 1), firstPage.get(0));
} }
@Test @Test
public void getDomains_SingleGroupOversizedOffset_ShouldContainNoDomains() throws DiscoveryException { public void getDomains_SingleGroupOversizedOffset_ShouldContainNoDomains() throws DiscoveryException {
DomainSearchCache cache = mock(DomainSearchCache.class); DomainSearchCache cache = mock(DomainSearchCache.class);
@ -230,14 +229,14 @@ public class DomainSearchTest {
} }
}; };
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null); DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null, List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 20, 5, null, null); new ArrayList<>(), null, null, null, groupOne, 20, 5, null, null, new TestSearchContextImpl(false));
assertEquals(0, firstPage.size()); assertEquals(0, firstPage.size());
} }
@Test @Test
public void getDomains_SingleGroupZeroSizedPage_ShouldContainNoDomains() throws DiscoveryException { public void getDomains_SingleGroupZeroSizedPage_ShouldContainNoDomains() throws DiscoveryException {
DomainSearchCache cache = mock(DomainSearchCache.class); DomainSearchCache cache = mock(DomainSearchCache.class);
@ -255,14 +254,14 @@ public class DomainSearchTest {
} }
}; };
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null); DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null, List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 0, null, null); new ArrayList<>(), null, null, null, groupOne, 0, 0, null, null, new TestSearchContextImpl(false));
assertEquals(0, firstPage.size()); assertEquals(0, firstPage.size());
} }
@Test @Test
public void getDomains_MultipleGroupsFullPage_ShouldContainAllDomainsInGroup() throws DiscoveryException { public void getDomains_MultipleGroupsFullPage_ShouldContainAllDomainsInGroup() throws DiscoveryException {
DomainSearchCache cache = mock(DomainSearchCache.class); DomainSearchCache cache = mock(DomainSearchCache.class);
@ -290,14 +289,14 @@ public class DomainSearchTest {
} }
}; };
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null); DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null, List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 3, null, null); new ArrayList<>(), null, null, null, groupOne, 0, 3, null, null, new TestSearchContextImpl(false));
assertEquals(3, firstPage.size()); assertEquals(3, firstPage.size());
} }
@Test @Test
public void getDomains_MultipleGroupsHalfPage_ShouldContainHalfDomainsInGroup() throws DiscoveryException { public void getDomains_MultipleGroupsHalfPage_ShouldContainHalfDomainsInGroup() throws DiscoveryException {
DomainSearchCache cache = mock(DomainSearchCache.class); DomainSearchCache cache = mock(DomainSearchCache.class);
@ -325,17 +324,17 @@ public class DomainSearchTest {
} }
}; };
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null); DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null, List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupTwo, 1, 2, null, null); new ArrayList<>(), null, null, null, groupTwo, 1, 2, null, null, new TestSearchContextImpl(false));
assertEquals(2, firstPage.size()); assertEquals(2, firstPage.size());
for (int i = 0; i < firstPage.size(); i++) { for (int i = 0; i < firstPage.size(); i++) {
assertEquals(dummyData.get(groupTwo).get(i + 1), firstPage.get(i)); assertEquals(dummyData.get(groupTwo).get(i + 1), firstPage.get(i));
} }
} }
@Test @Test
public void getDomains_SingleGroupSimulatedPaging_ShouldPageThroughAllDomains() throws DiscoveryException { public void getDomains_SingleGroupSimulatedPaging_ShouldPageThroughAllDomains() throws DiscoveryException {
DomainSearchCache cache = mock(DomainSearchCache.class); DomainSearchCache cache = mock(DomainSearchCache.class);
@ -357,20 +356,20 @@ public class DomainSearchTest {
} }
}; };
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null); DomainSearch domainSearch = new DomainSearch(cache, null, null);
int start = 0; int start = 0;
int size = 2; int size = 2;
while (start + size <= domains.size()) { while (start + size <= domains.size()) {
List<Result> page = domainSearch.getDomainsInGroup(null, List<Result> page = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, start, size, null, null); new ArrayList<>(), null, null, null, groupOne, start, size, null, null, new TestSearchContextImpl(false));
assertEquals(2, page.size()); assertEquals(2, page.size());
for(int i = 0; i < page.size(); i++) { for (int i = 0; i < page.size(); i++) {
assertEquals(domains.get(start + i), page.get(i)); assertEquals(domains.get(start + i), page.get(i));
} }
start += size; start += size;
} }
} }
@ -379,7 +378,7 @@ public class DomainSearchTest {
private final String name; private final String name;
public DummyKey(String name) { DummyKey(String name) {
this.name = name; this.name = name;
} }

View File

@ -0,0 +1,37 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.discovery.search;
/**
 * Test double for SearchContext. Reports a fixed, pre-configured cancellation
 * status so tests can exercise both the cancelled and non-cancelled code paths
 * without a real search in progress, and without risking NPEs from a null
 * context.
 */
public class TestSearchContextImpl implements SearchContext {

    // Fixed cancellation status this context reports for its entire lifetime.
    private final boolean cancelled;

    /**
     * Construct a test search context with a fixed cancellation status.
     *
     * @param hasBeenCancelled True if this context should report that the
     *                         search has been cancelled, false otherwise.
     */
    public TestSearchContextImpl(boolean hasBeenCancelled) {
        this.cancelled = hasBeenCancelled;
    }

    @Override
    public boolean searchIsCancelled() {
        return cancelled;
    }
}