diff --git a/Core/src/org/sleuthkit/autopsy/discovery/search/AbstractFilter.java b/Core/src/org/sleuthkit/autopsy/discovery/search/AbstractFilter.java index bb2e258f2e..eed8a106f4 100644 --- a/Core/src/org/sleuthkit/autopsy/discovery/search/AbstractFilter.java +++ b/Core/src/org/sleuthkit/autopsy/discovery/search/AbstractFilter.java @@ -1,7 +1,7 @@ /* * Autopsy * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -54,14 +54,18 @@ public abstract class AbstractFilter { * @param caseDb The case database * @param centralRepoDb The central repo database. Can be null if the * filter does not require it. + * @param context The SearchContext the search which is applying this + * filter is being performed from. * * @return The list of results that match this filter (and any that came * before it) * * @throws DiscoveryException + * @throws SearchCancellationException Thrown when the user has cancelled + * the search. */ public List applyAlternateFilter(List currentResults, SleuthkitCase caseDb, - CentralRepository centralRepoDb) throws DiscoveryException { + CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { return new ArrayList<>(); } diff --git a/Core/src/org/sleuthkit/autopsy/discovery/search/DiscoveryAttributes.java b/Core/src/org/sleuthkit/autopsy/discovery/search/DiscoveryAttributes.java index 16e0e80f1b..c1436ff890 100644 --- a/Core/src/org/sleuthkit/autopsy/discovery/search/DiscoveryAttributes.java +++ b/Core/src/org/sleuthkit/autopsy/discovery/search/DiscoveryAttributes.java @@ -1,7 +1,7 @@ /* * Autopsy * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -78,10 +78,14 @@ public class DiscoveryAttributes { * @param caseDb The case database. * @param centralRepoDb The central repository database. Can be null if * not needed. + * @param context The SearchContext the search which is applying + * this filter is being performed from. * * @throws DiscoveryException + * @throws SearchCancellationException - Thrown when the user has + * cancelled the search. 
*/ - public void addAttributeToResults(List results, SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { + public void addAttributeToResults(List results, SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { // Default is to do nothing } } @@ -154,10 +158,13 @@ public class DiscoveryAttributes { @Override public void addAttributeToResults(List results, SleuthkitCase caseDb, - CentralRepository centralRepoDb) throws DiscoveryException { + CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { try { - Map> domainsToCategories = getDomainsWithWebCategories(caseDb); + Map> domainsToCategories = getDomainsWithWebCategories(caseDb, context); for (Result result : results) { + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while Domain Category Attribute was being added."); + } if (result instanceof ResultDomain) { ResultDomain domain = (ResultDomain) result; domain.addWebCategories(domainsToCategories.get(domain.getDomain())); @@ -172,14 +179,29 @@ public class DiscoveryAttributes { * Loads all TSK_WEB_CATEGORY artifacts and maps the domain attribute to * the category name attribute. Each ResultDomain is then parsed and * matched against this map of values. + * + * @param caseDb The case database. + * @param context The SearchContext the search which is applying this + * filter is being performed from. + * + * @return domainToCategory - A map of the domain names to the category + * name attribute they are classified as. + * + * @throws TskCoreException + * @throws InterruptedException + * @throws SearchCancellationException - Thrown when the user has + * cancelled the search. */ - private Map> getDomainsWithWebCategories(SleuthkitCase caseDb) throws TskCoreException, InterruptedException { + private Map> getDomainsWithWebCategories(SleuthkitCase caseDb, SearchContext context) throws TskCoreException, InterruptedException, SearchCancellationException { Map> domainToCategory = new HashMap<>(); for (BlackboardArtifact artifact : caseDb.getBlackboardArtifacts(TSK_WEB_CATEGORIZATION)) { if (Thread.currentThread().isInterrupted()) { throw new InterruptedException(); } + if (context.searchIsCancelled()) { + throw new SearchCancellationException("Search was cancelled while getting domains for artifact type: " + artifact.getDisplayName()); + } BlackboardAttribute webCategory = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME)); BlackboardAttribute domain = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN)); if (webCategory != null && domain != null) { @@ -206,14 +228,16 @@ public class DiscoveryAttributes { @Override public void addAttributeToResults(List results, SleuthkitCase caseDb, - CentralRepository centralRepoDb) throws DiscoveryException { + CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { // Get pairs of (object ID, keyword list name) for all files in the list of files that have // keyword list hits. 
String selectQuery = createSetNameClause(results, BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID(), BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID()); - SetKeywordListNamesCallback callback = new SetKeywordListNamesCallback(results); + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while Keyword List Attribute was being added."); + } try { caseDb.getCaseDbAccessManager().select(selectQuery, callback); } catch (TskCoreException ex) { @@ -278,8 +302,20 @@ public class DiscoveryAttributes { * Example: query for notable status of google.com. Result: notable With * this map, all domain instances that represent google.com can be updated * after one simple lookup. + * + * @param domainsBatch The list of ResultDomains to organize. + * @param attributeType The type of correlation attribute being organized. + * @param context The SearchContext the search which is applying this + * filter is being performed from. + * + * @return resultDomainTable - A map of the normalized domain name to the + * list of ResultDomain objects which are part of that normalized + * domain. + * + * @throws SearchCancellationException - Thrown when the user has cancelled + * the search. */ - private static Map> organizeByValue(List domainsBatch, CorrelationAttributeInstance.Type attributeType) { + private static Map> organizeByValue(List domainsBatch, CorrelationAttributeInstance.Type attributeType, SearchContext context) throws SearchCancellationException { final Map> resultDomainTable = new HashMap<>(); for (ResultDomain domainInstance : domainsBatch) { try { @@ -288,6 +324,9 @@ public class DiscoveryAttributes { final List bucket = resultDomainTable.getOrDefault(normalizedDomain, new ArrayList<>()); bucket.add(domainInstance); resultDomainTable.put(normalizedDomain, bucket); + if (context.searchIsCancelled()) { + throw new SearchCancellationException("Search was cancelled while organizing domains by their normalized value."); + } } catch (CorrelationAttributeNormalizationException ex) { logger.log(Level.INFO, String.format("Domain [%s] failed normalization, skipping...", domainInstance.getDomain())); } @@ -322,39 +361,73 @@ public class DiscoveryAttributes { @Override public void addAttributeToResults(List results, SleuthkitCase caseDb, - CentralRepository centralRepoDb) throws DiscoveryException { + CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { if (centralRepoDb != null) { - processFilesWithCr(results, centralRepoDb); + processFilesWithCr(results, centralRepoDb, context); } } - private void processFilesWithCr(List results, CentralRepository centralRepo) throws DiscoveryException { + /** + * Helper method to batch the domain results and check for notability. + * + * @param results The results which are being checked for previously + * being notable in the CR. + * @param centralRepo The central repository being used to check for + * notability. + * @param context The SearchContext the search which is applying + * this filter is being performed from. + * + * @throws DiscoveryException + * @throws SearchCancellationException - Thrown when the user has + * cancelled the search.
+ */ + private void processFilesWithCr(List results, CentralRepository centralRepo, SearchContext context) throws DiscoveryException, SearchCancellationException { List domainsBatch = new ArrayList<>(); for (Result result : results) { + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while Previously Notable attribute was being calculated with the CR."); + } if (result.getType() == SearchData.Type.DOMAIN) { domainsBatch.add((ResultDomain) result); if (domainsBatch.size() == DOMAIN_BATCH_SIZE) { - queryPreviouslyNotable(domainsBatch, centralRepo); + queryPreviouslyNotable(domainsBatch, centralRepo, context); domainsBatch.clear(); } } } - queryPreviouslyNotable(domainsBatch, centralRepo); + queryPreviouslyNotable(domainsBatch, centralRepo, context); } - private void queryPreviouslyNotable(List domainsBatch, CentralRepository centralRepo) throws DiscoveryException { + /** + * Helper method to check a batch of domains for notability. + * + * + * @param domainsBatch The list of ResultDomains to check for + * notability. + * @param centralRepo The central repository being used to check for + * notability. + * @param context The SearchContext the search which is applying + * this filter is being performed from. + * + * @throws DiscoveryException + * @throws SearchCancellationException - Thrown when the user has + * cancelled the search. + */ + private void queryPreviouslyNotable(List domainsBatch, CentralRepository centralRepo, SearchContext context) throws DiscoveryException, SearchCancellationException { if (domainsBatch.isEmpty()) { return; } try { final CorrelationAttributeInstance.Type attributeType = centralRepo.getCorrelationTypeById(CorrelationAttributeInstance.DOMAIN_TYPE_ID); - final Map> resultDomainTable = organizeByValue(domainsBatch, attributeType); + final Map> resultDomainTable = organizeByValue(domainsBatch, attributeType, context); final String values = createCSV(resultDomainTable.keySet()); - + if (context.searchIsCancelled()) { + throw new SearchCancellationException("Search was cancelled while checking for previously notable domains."); + } final String tableName = CentralRepoDbUtil.correlationTypeToInstanceTableName(attributeType); final String domainFrequencyQuery = " value AS domain_name " + "FROM " + tableName + " " @@ -421,7 +494,7 @@ public class DiscoveryAttributes { @Override public void addAttributeToResults(List results, SleuthkitCase caseDb, - CentralRepository centralRepoDb) throws DiscoveryException { + CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { if (centralRepoDb == null) { for (Result result : results) { if (result.getFrequency() == SearchData.Frequency.UNKNOWN && result.getKnown() == TskData.FileKnown.KNOWN) { @@ -429,7 +502,7 @@ public class DiscoveryAttributes { } } } else { - processResultFilesForCR(results, centralRepoDb); + processResultFilesForCR(results, centralRepoDb, context); } } @@ -437,16 +510,26 @@ public class DiscoveryAttributes { * Private helper method for adding Frequency attribute when CR is * enabled. * - * @param files The list of ResultFiles to caluclate frequency - * for. - * @param centralRepoDb The central repository currently in use. + * @param results The results which are having their frequency + * checked. + * @param centralRepoDb The central repository being used to check + * frequency. + * @param context The SearchContext the search which is applying + * this filter is being performed from. 
+ * + * @throws DiscoveryException + * @throws SearchCancellationException - Thrown when the user has + * cancelled the search. */ private void processResultFilesForCR(List results, - CentralRepository centralRepoDb) throws DiscoveryException { + CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { List currentFiles = new ArrayList<>(); Set hashesToLookUp = new HashSet<>(); List domainsToQuery = new ArrayList<>(); for (Result result : results) { + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while Frequency attribute was being calculated with the CR."); + } // If frequency was already calculated, skip... if (result.getFrequency() == SearchData.Frequency.UNKNOWN) { if (result.getKnown() == TskData.FileKnown.KNOWN) { @@ -462,7 +545,7 @@ public class DiscoveryAttributes { } if (hashesToLookUp.size() >= BATCH_SIZE) { - computeFrequency(hashesToLookUp, currentFiles, centralRepoDb); + computeFrequency(hashesToLookUp, currentFiles, centralRepoDb, context); hashesToLookUp.clear(); currentFiles.clear(); @@ -470,16 +553,15 @@ public class DiscoveryAttributes { } else { domainsToQuery.add((ResultDomain) result); if (domainsToQuery.size() == DOMAIN_BATCH_SIZE) { - queryDomainFrequency(domainsToQuery, centralRepoDb); - + queryDomainFrequency(domainsToQuery, centralRepoDb, context); domainsToQuery.clear(); } } } } - queryDomainFrequency(domainsToQuery, centralRepoDb); - computeFrequency(hashesToLookUp, currentFiles, centralRepoDb); + queryDomainFrequency(domainsToQuery, centralRepoDb, context); + computeFrequency(hashesToLookUp, currentFiles, centralRepoDb, context); } } @@ -487,17 +569,22 @@ public class DiscoveryAttributes { * Query to get the frequency of a domain. * * @param domainsToQuery List of domains to check the frequency of. - * @param centralRepository The central repository to query. + * @param centralRepository The central repository being used to check + * frequency. + * @param context The SearchContext the search which is applying + * this filter is being performed from. * * @throws DiscoveryException + * @throws SearchCancellationException - Thrown when the user has cancelled + * the search. 
*/ - private static void queryDomainFrequency(List domainsToQuery, CentralRepository centralRepository) throws DiscoveryException { + private static void queryDomainFrequency(List domainsToQuery, CentralRepository centralRepository, SearchContext context) throws DiscoveryException, SearchCancellationException { if (domainsToQuery.isEmpty()) { return; } try { final CorrelationAttributeInstance.Type attributeType = centralRepository.getCorrelationTypeById(CorrelationAttributeInstance.DOMAIN_TYPE_ID); - final Map> resultDomainTable = organizeByValue(domainsToQuery, attributeType); + final Map> resultDomainTable = organizeByValue(domainsToQuery, attributeType, context); final String values = createCSV(resultDomainTable.keySet()); final String tableName = CentralRepoDbUtil.correlationTypeToInstanceTableName(attributeType); final String domainFrequencyQuery = " value AS domain_name, COUNT(value) AS frequency FROM" @@ -508,8 +595,11 @@ public class DiscoveryAttributes { + ")) AS foo GROUP BY value"; final DomainFrequencyCallback frequencyCallback = new DomainFrequencyCallback(resultDomainTable); - centralRepository.processSelectClause(domainFrequencyQuery, frequencyCallback); + centralRepository.processSelectClause(domainFrequencyQuery, frequencyCallback); + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while Domain frequency was being queried with the CR."); + } if (frequencyCallback.getCause() != null) { throw frequencyCallback.getCause(); } @@ -620,7 +710,7 @@ public class DiscoveryAttributes { @Override public void addAttributeToResults(List results, SleuthkitCase caseDb, - CentralRepository centralRepoDb) throws DiscoveryException { + CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { // Get pairs of (object ID, hash set name) for all files in the list of files that have // hash set hits. @@ -628,6 +718,9 @@ public class DiscoveryAttributes { BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID()); HashSetNamesCallback callback = new HashSetNamesCallback(results); + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while Hash Hit attribute was being added."); + } try { caseDb.getCaseDbAccessManager().select(selectQuery, callback); } catch (TskCoreException ex) { @@ -695,7 +788,7 @@ public class DiscoveryAttributes { @Override public void addAttributeToResults(List results, SleuthkitCase caseDb, - CentralRepository centralRepoDb) throws DiscoveryException { + CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { // Get pairs of (object ID, interesting item set name) for all files in the list of files that have // interesting file set hits. 
@@ -703,6 +796,9 @@ public class DiscoveryAttributes { BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID()); InterestingFileSetNamesCallback callback = new InterestingFileSetNamesCallback(results); + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while Interesting Item attribute was being added."); + } try { caseDb.getCaseDbAccessManager().select(selectQuery, callback); } catch (TskCoreException ex) { @@ -808,7 +904,7 @@ public class DiscoveryAttributes { @Override public void addAttributeToResults(List results, SleuthkitCase caseDb, - CentralRepository centralRepoDb) throws DiscoveryException { + CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { // Get pairs of (object ID, object type name) for all files in the list of files that have // objects detected @@ -816,6 +912,9 @@ public class DiscoveryAttributes { BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION.getTypeID()); ObjectDetectedNamesCallback callback = new ObjectDetectedNamesCallback(results); + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while Object Detected attribute was being added."); + } try { caseDb.getCaseDbAccessManager().select(selectQuery, callback); } catch (TskCoreException ex) { @@ -884,10 +983,13 @@ public class DiscoveryAttributes { @Override public void addAttributeToResults(List results, SleuthkitCase caseDb, - CentralRepository centralRepoDb) throws DiscoveryException { + CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { try { for (Result result : results) { + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while File Tag attribute was being added."); + } if (result.getType() == SearchData.Type.DOMAIN) { return; } @@ -995,14 +1097,20 @@ public class DiscoveryAttributes { } /** + * * Computes the CR frequency of all the given hashes and updates the list of * files. * * @param hashesToLookUp Hashes to find the frequency of. * @param currentFiles List of files to update with frequencies. * @param centralRepoDb The central repository being used. + * @param context The SearchContext the search which is applying this + * filter is being performed from. + * + * @throws SearchCancellationException - Thrown when the user has cancelled + * the search. 
*/ - private static void computeFrequency(Set hashesToLookUp, List currentFiles, CentralRepository centralRepoDb) { + private static void computeFrequency(Set hashesToLookUp, List currentFiles, CentralRepository centralRepoDb, SearchContext context) throws SearchCancellationException { if (hashesToLookUp.isEmpty()) { return; @@ -1022,7 +1130,9 @@ FrequencyCallback callback = new FrequencyCallback(currentFiles); centralRepoDb.processSelectClause(selectClause, callback); - + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while file frequency was being queried with the CR."); + } } catch (CentralRepoException ex) { logger.log(Level.WARNING, "Error getting frequency counts from Central Repository", ex); // NON-NLS } diff --git a/Core/src/org/sleuthkit/autopsy/discovery/search/DiscoveryKeyUtils.java b/Core/src/org/sleuthkit/autopsy/discovery/search/DiscoveryKeyUtils.java index c201414199..ca1828d02c 100644 --- a/Core/src/org/sleuthkit/autopsy/discovery/search/DiscoveryKeyUtils.java +++ b/Core/src/org/sleuthkit/autopsy/discovery/search/DiscoveryKeyUtils.java @@ -59,6 +59,7 @@ public class DiscoveryKeyUtils { private final List filters; private final SleuthkitCase sleuthkitCase; private final CentralRepository centralRepository; + private final SearchContext context; /** * Construct a new SearchKey with all information that defines a search. @@ -70,16 +71,20 @@ public class DiscoveryKeyUtils { * @param sortingMethod The method to sort the results by. * @param sleuthkitCase The SleuthkitCase being searched. * @param centralRepository The Central Repository being searched. + * @param context The SearchContext which reflects the search + * being performed to get results for this + * key. */ SearchKey(String userName, List filters, DiscoveryAttributes.AttributeType groupAttributeType, Group.GroupSortingAlgorithm groupSortingType, ResultsSorter.SortingMethod sortingMethod, - SleuthkitCase sleuthkitCase, CentralRepository centralRepository) { + SleuthkitCase sleuthkitCase, CentralRepository centralRepository, SearchContext context) { this.groupAttributeType = groupAttributeType; this.groupSortingType = groupSortingType; this.sortingMethod = sortingMethod; this.filters = filters; + this.context = context; StringBuilder searchStringBuilder = new StringBuilder(); searchStringBuilder.append(userName); @@ -93,8 +98,8 @@ public class DiscoveryKeyUtils { } /** - * Construct a SearchKey without a SleuthkitCase or CentralRepositry - * instance. + * Construct a SearchKey without a SearchContext, SleuthkitCase or + * CentralRepository instance. * * @param userName The name of the user performing the search. * @param filters The Filters being used for the search. @@ -107,7 +112,8 @@ public class DiscoveryKeyUtils { Group.GroupSortingAlgorithm groupSortingType, ResultsSorter.SortingMethod sortingMethod) { this(userName, filters, groupAttributeType, groupSortingType, - sortingMethod, null, null); + sortingMethod, null, null, null); + //this constructor should only be used when putting entries directly into a map or checking if they are present, since the case db, CR, and search context will be null } @Override @@ -141,6 +147,23 @@ public class DiscoveryKeyUtils { return hash; } + /** + * Get the SearchContext for the search this key is being used in. + * + * @return The SearchContext the search key is being used in.
+ * + * @throws DiscoveryException Thrown when the key being used has a null + * context indicating it was not created with + * knowledge of the case or central + * repository databases. + */ + SearchContext getContext() throws DiscoveryException { + if (context == null) { + throw new DiscoveryException("The key in use was created without a context and does not support retrieving information from the databases."); + } + return context; + } + /** * Get the String representation of this key. * diff --git a/Core/src/org/sleuthkit/autopsy/discovery/search/DomainSearch.java b/Core/src/org/sleuthkit/autopsy/discovery/search/DomainSearch.java index eb52327d45..0557808758 100644 --- a/Core/src/org/sleuthkit/autopsy/discovery/search/DomainSearch.java +++ b/Core/src/org/sleuthkit/autopsy/discovery/search/DomainSearch.java @@ -78,24 +78,31 @@ public class DomainSearch { * @param caseDb The case database. * @param centralRepoDb The central repository database. Can be null * if not needed. + * @param context The SearchContext the search is being performed from. * * @return A LinkedHashMap grouped and sorted according to the parameters. * * @throws DiscoveryException + * @throws SearchCancellationException - Thrown when the user has cancelled + * the search. */ public Map getGroupSizes(String userName, List filters, DiscoveryAttributes.AttributeType groupAttributeType, Group.GroupSortingAlgorithm groupSortingType, ResultsSorter.SortingMethod domainSortingMethod, - SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { + SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { final Map> searchResults = searchCache.get( userName, filters, groupAttributeType, groupSortingType, - domainSortingMethod, caseDb, centralRepoDb); + domainSortingMethod, caseDb, centralRepoDb, context); + // Transform the cached results into a map of group key to group size. final LinkedHashMap groupSizes = new LinkedHashMap<>(); for (GroupKey groupKey : searchResults.keySet()) { + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled before group sizes were finished being calculated"); + } groupSizes.put(groupKey, searchResults.get(groupKey).size()); } @@ -130,11 +137,11 @@ public class DomainSearch { Group.GroupSortingAlgorithm groupSortingType, ResultsSorter.SortingMethod domainSortingMethod, GroupKey groupKey, int startingEntry, int numberOfEntries, - SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { + SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { final Map> searchResults = searchCache.get( userName, filters, groupAttributeType, groupSortingType, - domainSortingMethod, caseDb, centralRepoDb); + domainSortingMethod, caseDb, centralRepoDb, context); final List domainsInGroup = searchResults.get(groupKey); final List page = new ArrayList<>(); for (int i = startingEntry; (i < startingEntry + numberOfEntries) diff --git a/Core/src/org/sleuthkit/autopsy/discovery/search/DomainSearchCache.java b/Core/src/org/sleuthkit/autopsy/discovery/search/DomainSearchCache.java index 306a66b287..ee677a6972 100755 --- a/Core/src/org/sleuthkit/autopsy/discovery/search/DomainSearchCache.java +++ b/Core/src/org/sleuthkit/autopsy/discovery/search/DomainSearchCache.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. 
+ * Copyright 2020-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -55,20 +55,24 @@ class DomainSearchCache { * @param caseDb The case database. * @param centralRepoDb The central repository database. Can be null if * not needed. + * @param context The SearchContext the search is being performed + * from. * * @return Domain search results matching the given parameters. * * @throws DiscoveryException + * @throws SearchCancellationException - Thrown when the user has cancelled + * the search. */ Map> get(String userName, List filters, DiscoveryAttributes.AttributeType groupAttributeType, Group.GroupSortingAlgorithm groupSortingType, ResultsSorter.SortingMethod domainSortingMethod, - SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { + SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { try { final SearchKey searchKey = new SearchKey(userName, filters, groupAttributeType, - groupSortingType, domainSortingMethod, caseDb, centralRepoDb); + groupSortingType, domainSortingMethod, caseDb, centralRepoDb, context); return cache.get(searchKey); } catch (ExecutionException ex) { throw new DiscoveryException("Error fetching results from cache", ex.getCause()); diff --git a/Core/src/org/sleuthkit/autopsy/discovery/search/DomainSearchCacheLoader.java b/Core/src/org/sleuthkit/autopsy/discovery/search/DomainSearchCacheLoader.java index db2ada61de..6a302fdab1 100755 --- a/Core/src/org/sleuthkit/autopsy/discovery/search/DomainSearchCacheLoader.java +++ b/Core/src/org/sleuthkit/autopsy/discovery/search/DomainSearchCacheLoader.java @@ -73,7 +73,7 @@ class DomainSearchCacheLoader extends CacheLoader sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -62,17 +62,21 @@ public class FileSearch { * @param caseDb The case database * @param centralRepoDb The central repository database. Can be null if * not needed. + * @param context The SearchContext the search is being performed + * from. * * @return The raw search results * * @throws DiscoveryException + * @throws SearchCancellationException - Thrown when the user has cancelled + * the search. */ static SearchResults runFileSearchDebug(String userName, List filters, AttributeType groupAttributeType, Group.GroupSortingAlgorithm groupSortingType, ResultsSorter.SortingMethod fileSortingMethod, - SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { + SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { // Make a list of attributes that we want to add values for. This ensures the // ResultFile objects will have all needed fields set when it's time to group // and sort them. 
For example, if we're grouping by central repo frequency, we need @@ -82,10 +86,10 @@ public class FileSearch { attributesNeededForGroupingOrSorting.addAll(fileSortingMethod.getRequiredAttributes()); // Run the queries for each filter - List results = SearchFiltering.runQueries(filters, caseDb, centralRepoDb); + List results = SearchFiltering.runQueries(filters, caseDb, centralRepoDb, context); // Add the data to resultFiles for any attributes needed for sorting and grouping - addAttributes(attributesNeededForGroupingOrSorting, results, caseDb, centralRepoDb); + addAttributes(attributesNeededForGroupingOrSorting, results, caseDb, centralRepoDb, context); // Collect everything in the search results SearchResults searchResults = new SearchResults(groupSortingType, groupAttributeType, fileSortingMethod); @@ -114,21 +118,28 @@ public class FileSearch { * @param caseDb The case database * @param centralRepoDb The central repository database. Can be null if * not needed. + * @param context The SearchContext the search is being performed + * from. * * @return A LinkedHashMap grouped and sorted according to the parameters * * @throws DiscoveryException + * @throws SearchCancellationException - Thrown when the user has cancelled + * the search. */ public static Map getGroupSizes(String userName, List filters, AttributeType groupAttributeType, Group.GroupSortingAlgorithm groupSortingType, ResultsSorter.SortingMethod fileSortingMethod, - SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { + SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { Map> searchResults = runFileSearch(userName, filters, - groupAttributeType, groupSortingType, fileSortingMethod, caseDb, centralRepoDb); + groupAttributeType, groupSortingType, fileSortingMethod, caseDb, centralRepoDb, context); LinkedHashMap groupSizes = new LinkedHashMap<>(); for (GroupKey groupKey : searchResults.keySet()) { + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled before group sizes were finished being calculated"); + } groupSizes.put(groupKey, searchResults.get(groupKey).size()); } return groupSizes; @@ -151,10 +162,14 @@ public class FileSearch { * @param caseDb The case database * @param centralRepoDb The central repository database. Can be null if * not needed. + * @param context The SearchContext the search is being performed + * from. * * @return A LinkedHashMap grouped and sorted according to the parameters * * @throws DiscoveryException + * @throws SearchCancellationException - Thrown when the user has cancelled + * the search. 
*/ public static List getFilesInGroup(String userName, List filters, @@ -164,7 +179,7 @@ public class FileSearch { GroupKey groupKey, int startingEntry, int numberOfEntries, - SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { + SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { //the group should be in the cache at this point List filesInGroup = null; SearchKey searchKey = new SearchKey(userName, filters, groupAttributeType, groupSortingType, fileSortingMethod); @@ -178,7 +193,7 @@ public class FileSearch { List page = new ArrayList<>(); if (filesInGroup == null) { logger.log(Level.INFO, "Group {0} was not cached, performing search to cache all groups again", groupKey); - runFileSearch(userName, filters, groupAttributeType, groupSortingType, fileSortingMethod, caseDb, centralRepoDb); + runFileSearch(userName, filters, groupAttributeType, groupSortingType, fileSortingMethod, caseDb, centralRepoDb, context); synchronized (searchCache) { resultsMap = searchCache.getIfPresent(searchKey.getKeyString()); } @@ -218,7 +233,6 @@ public class FileSearch { TextSummarizer localSummarizer; synchronized (searchCache) { localSummarizer = SummaryHelpers.getLocalSummarizer(); - } if (localSummarizer != null) { try { @@ -247,17 +261,21 @@ public class FileSearch { * @param caseDb The case database * @param centralRepoDb The central repository database. Can be null if * not needed. + * @param context The SearchContext the search is being performed + * from. * * @return A LinkedHashMap grouped and sorted according to the parameters * * @throws DiscoveryException + * @throws SearchCancellationException - Thrown when the user has cancelled + * the search. */ - private static Map> runFileSearch(String userName, + public static Map> runFileSearch(String userName, List filters, AttributeType groupAttributeType, Group.GroupSortingAlgorithm groupSortingType, ResultsSorter.SortingMethod fileSortingMethod, - SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { + SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { // Make a list of attributes that we want to add values for. This ensures the // ResultFile objects will have all needed fields set when it's time to group @@ -268,10 +286,10 @@ public class FileSearch { attributesNeededForGroupingOrSorting.addAll(fileSortingMethod.getRequiredAttributes()); // Run the queries for each filter - List results = SearchFiltering.runQueries(filters, caseDb, centralRepoDb); + List results = SearchFiltering.runQueries(filters, caseDb, centralRepoDb, context); // Add the data to resultFiles for any attributes needed for sorting and grouping - addAttributes(attributesNeededForGroupingOrSorting, results, caseDb, centralRepoDb); + addAttributes(attributesNeededForGroupingOrSorting, results, caseDb, centralRepoDb, context); // Collect everything in the search results SearchResults searchResults = new SearchResults(groupSortingType, groupAttributeType, fileSortingMethod); @@ -295,13 +313,17 @@ public class FileSearch { * @param caseDb The case database * @param centralRepoDb The central repository database. Can be null if not * needed. + * @param context The SearchContext the search is being performed + * from. * * @throws DiscoveryException + * @throws SearchCancellationException - Thrown when the user has cancelled + * the search. 
*/ - private static void addAttributes(List attrs, List results, SleuthkitCase caseDb, CentralRepository centralRepoDb) - throws DiscoveryException { + private static void addAttributes(List attrs, List results, SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) + throws DiscoveryException, SearchCancellationException { for (AttributeType attr : attrs) { - attr.addAttributeToResults(results, caseDb, centralRepoDb); + attr.addAttributeToResults(results, caseDb, centralRepoDb, context); } } diff --git a/Core/src/org/sleuthkit/autopsy/discovery/search/SearchCancellationException.java b/Core/src/org/sleuthkit/autopsy/discovery/search/SearchCancellationException.java new file mode 100644 index 0000000000..2587777382 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/discovery/search/SearchCancellationException.java @@ -0,0 +1,40 @@ +/* + * Autopsy + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.discovery.search; + +import java.util.concurrent.CancellationException; + +/** + * Exception to be thrown when the search has been intentionally cancelled to + * provide information on where the code was when the cancellation took place. + */ +public class SearchCancellationException extends CancellationException { + + private static final long serialVersionUID = 1L; + + /** + * Construct a new SearchCancellationException with the specified message. + * + * @param message The text to use as the message for the exception. + */ + SearchCancellationException(String message) { + super(message); + } + +} diff --git a/Core/src/org/sleuthkit/autopsy/discovery/search/SearchContext.java b/Core/src/org/sleuthkit/autopsy/discovery/search/SearchContext.java new file mode 100644 index 0000000000..e0ce318b58 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/discovery/search/SearchContext.java @@ -0,0 +1,33 @@ +/* + * Autopsy + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.discovery.search; + +/** + * Interface for providing feedback on if a search has been cancelled. + * + */ +public interface SearchContext { + + /** + * Returns true if the search has been cancelled, false otherwise. + * + * @return True if the search has been cancelled, false otherwise. 
+ */ + boolean searchIsCancelled(); +} diff --git a/Core/src/org/sleuthkit/autopsy/discovery/search/SearchFiltering.java b/Core/src/org/sleuthkit/autopsy/discovery/search/SearchFiltering.java index 29a4dd698c..eba1402ccf 100644 --- a/Core/src/org/sleuthkit/autopsy/discovery/search/SearchFiltering.java +++ b/Core/src/org/sleuthkit/autopsy/discovery/search/SearchFiltering.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2019-2020 Basis Technology Corp. + * Copyright 2019-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -60,10 +60,16 @@ public class SearchFiltering { * @param caseDb The case database. * @param centralRepoDb The central repo. Can be null as long as no filters * need it. + * @param context The SearchContext the search is being performed + * from. * * @return List of Results from the search performed. + * + * @throws DiscoveryException + * @throws SearchCancellationException - Thrown when the user has cancelled + * the search. */ - static List runQueries(List filters, SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { + static List runQueries(List filters, SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { if (caseDb == null) { throw new DiscoveryException("Case DB parameter is null"); // NON-NLS } @@ -82,8 +88,11 @@ public class SearchFiltering { // The file search filter is required, so this should never be empty. throw new DiscoveryException("Selected filters do not include a case database query"); } + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled before result list could be retrieved."); + } try { - return getResultList(filters, combinedQuery, caseDb, centralRepoDb); + return getResultList(filters, combinedQuery, caseDb, centralRepoDb, context); } catch (TskCoreException ex) { throw new DiscoveryException("Error querying case database", ex); // NON-NLS } @@ -97,17 +106,23 @@ public class SearchFiltering { * @param caseDb The case database. * @param centralRepoDb The central repo. Can be null as long as no filters * need it. + * @param context The SearchContext the search is being performed + * from. * * @return An ArrayList of Results returned by the query. * * @throws TskCoreException * @throws DiscoveryException + * @throws SearchCancellationException - Thrown when the user has cancelled + * the search. */ - private static List getResultList(List filters, String combinedQuery, SleuthkitCase caseDb, CentralRepository centralRepoDb) throws TskCoreException, DiscoveryException { + private static List getResultList(List filters, String combinedQuery, SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws TskCoreException, DiscoveryException, SearchCancellationException { // Get all matching abstract files List resultList = new ArrayList<>(); List sqlResults = caseDb.findAllFilesWhere(combinedQuery); - + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while the case database query was being performed."); + } // If there are no results, return now if (sqlResults.isEmpty()) { return resultList; @@ -120,8 +135,11 @@ public class SearchFiltering { // Now run any non-SQL filters. 
for (AbstractFilter filter : filters) { + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while alternate filters were being applied."); + } if (filter.useAlternateFilter()) { - resultList = filter.applyAlternateFilter(resultList, caseDb, centralRepoDb); + resultList = filter.applyAlternateFilter(resultList, caseDb, centralRepoDb, context); } // There are no matches for the filters run so far, so return if (resultList.isEmpty()) { @@ -227,7 +245,7 @@ public class SearchFiltering { public Collection getTypes() { return Collections.unmodifiableCollection(types); } - + private StringJoiner joinStandardArtifactTypes() { StringJoiner joiner = new StringJoiner(","); for (ARTIFACT_TYPE type : types) { @@ -241,9 +259,10 @@ public class SearchFiltering { StringJoiner joiner = joinStandardArtifactTypes(); return "artifact_type_id IN (" + joiner + ")"; } - + /** - * Used by backend domain search code to query for additional artifact types. + * Used by backend domain search code to query for additional artifact + * types. */ String getWhereClause(List nonVisibleArtifactTypesToInclude) { StringJoiner joiner = joinStandardArtifactTypes(); @@ -674,14 +693,17 @@ public class SearchFiltering { @Override public List applyAlternateFilter(List currentResults, SleuthkitCase caseDb, - CentralRepository centralRepoDb) throws DiscoveryException { + CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { // Set the frequency for each file DiscoveryAttributes.FrequencyAttribute freqAttr = new DiscoveryAttributes.FrequencyAttribute(); - freqAttr.addAttributeToResults(currentResults, caseDb, centralRepoDb); + freqAttr.addAttributeToResults(currentResults, caseDb, centralRepoDb, context); // If the frequency matches the filter, add the file to the results List frequencyResults = new ArrayList<>(); for (Result file : currentResults) { + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while Frequency alternate filter was being applied."); + } if (frequencies.contains(file.getFrequency())) { frequencyResults.add(file); } @@ -705,7 +727,7 @@ public class SearchFiltering { return Bundle.SearchFiltering_FrequencyFilter_desc(desc); } } - + /** * A filter for domains with known account types. */ @@ -715,17 +737,20 @@ public class SearchFiltering { public String getWhereClause() { throw new UnsupportedOperationException("Not supported, this is an alternative filter."); } - + @Override public boolean useAlternateFilter() { return true; } - + @Override public List applyAlternateFilter(List currentResults, SleuthkitCase caseDb, - CentralRepository centralRepoDb) throws DiscoveryException { + CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { List filteredResults = new ArrayList<>(); for (Result result : currentResults) { + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while Known Account Type alternate filter was being applied."); + } if (result instanceof ResultDomain) { ResultDomain domain = (ResultDomain) result; if (domain.hasKnownAccountType()) { @@ -745,9 +770,9 @@ public class SearchFiltering { public String getDesc() { return Bundle.SearchFiltering_KnownAccountTypeFilter_desc(); } - + } - + /** * A filter for previously notable content in the central repository. 
*/ @@ -757,19 +782,22 @@ public class SearchFiltering { public String getWhereClause() { throw new UnsupportedOperationException("Not supported, this is an alternative filter."); } - + @Override public boolean useAlternateFilter() { return true; } - + @Override public List applyAlternateFilter(List currentResults, SleuthkitCase caseDb, - CentralRepository centralRepoDb) throws DiscoveryException { + CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { DiscoveryAttributes.PreviouslyNotableAttribute previouslyNotableAttr = new DiscoveryAttributes.PreviouslyNotableAttribute(); - previouslyNotableAttr.addAttributeToResults(currentResults, caseDb, centralRepoDb); + previouslyNotableAttr.addAttributeToResults(currentResults, caseDb, centralRepoDb, context); List filteredResults = new ArrayList<>(); for (Result file : currentResults) { + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while Previously Notable alternate filter was being applied."); + } if (file.getPreviouslyNotableInCR() == SearchData.PreviouslyNotable.PREVIOUSLY_NOTABLE) { filteredResults.add(file); } @@ -784,7 +812,7 @@ public class SearchFiltering { public String getDesc() { return Bundle.SearchFiltering_PreviouslyNotableFilter_desc(); } - + } /** @@ -1068,7 +1096,7 @@ public class SearchFiltering { @Override public List applyAlternateFilter(List currentResults, SleuthkitCase caseDb, - CentralRepository centralRepoDb) throws DiscoveryException { + CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException { if (centralRepoDb == null) { throw new DiscoveryException("Can not run Previously Notable filter with null Central Repository DB"); // NON-NLS @@ -1087,6 +1115,9 @@ public class SearchFiltering { CorrelationAttributeInstance.Type type = CorrelationAttributeInstance.getDefaultCorrelationTypes().get(CorrelationAttributeInstance.FILES_TYPE_ID); for (Result result : currentResults) { + if (context.searchIsCancelled()) { + throw new SearchCancellationException("The search was cancelled while Notable alternate filter was being applied."); + } ResultFile file = (ResultFile) result; if (result.getType() == SearchData.Type.DOMAIN) { break; diff --git a/Core/src/org/sleuthkit/autopsy/discovery/ui/DiscoveryDialog.java b/Core/src/org/sleuthkit/autopsy/discovery/ui/DiscoveryDialog.java index 450ad4d381..63bbf673a8 100644 --- a/Core/src/org/sleuthkit/autopsy/discovery/ui/DiscoveryDialog.java +++ b/Core/src/org/sleuthkit/autopsy/discovery/ui/DiscoveryDialog.java @@ -1,7 +1,7 @@ /* * Autopsy * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -31,7 +31,6 @@ import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.logging.Level; -import javax.swing.SwingUtilities; import org.apache.commons.lang.StringUtils; import org.openide.util.NbBundle.Messages; import org.openide.windows.WindowManager; @@ -574,7 +573,7 @@ final class DiscoveryDialog extends javax.swing.JDialog { } private void searchButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_searchButtonActionPerformed - // Get the selected filters + setVisible(false); //setVisible is used here instead of dispose in case the dispose code changes final DiscoveryTopComponent tc = DiscoveryTopComponent.getTopComponent(); if (tc == null) { setValid("No Top Component Found"); @@ -584,6 +583,7 @@ tc.open(); } tc.resetTopComponent(); + // Get the selected filters List filters; if (videosButton.isSelected()) { filters = videoFilterPanel.getFilters(); @@ -617,7 +617,6 @@ } searchWorker = new SearchWorker(centralRepoDb, type, filters, groupingAttr, groupSortAlgorithm, fileSort); searchWorker.execute(); - dispose(); tc.toFront(); tc.requestActive(); }//GEN-LAST:event_searchButtonActionPerformed @@ -651,6 +650,7 @@ void cancelSearch() { if (searchWorker != null) { searchWorker.cancel(true); + searchWorker = null; } } @@ -750,7 +750,6 @@ || eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT.getTypeID()) { shouldUpdate = shouldUpdateFilters(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(), eventData, interestingItems); } - } } catch (NoCurrentCaseException notUsed) { // Case is closed, do nothing. diff --git a/Core/src/org/sleuthkit/autopsy/discovery/ui/DiscoveryTopComponent.java b/Core/src/org/sleuthkit/autopsy/discovery/ui/DiscoveryTopComponent.java index a74b32ca26..c5dba98337 100644 --- a/Core/src/org/sleuthkit/autopsy/discovery/ui/DiscoveryTopComponent.java +++ b/Core/src/org/sleuthkit/autopsy/discovery/ui/DiscoveryTopComponent.java @@ -1,7 +1,7 @@ /* * Autopsy * - * Copyright 2019-2020 Basis Technology Corp. + * Copyright 2019-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -260,7 +260,6 @@ public final class DiscoveryTopComponent extends TopComponent { private void newSearchButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_newSearchButtonActionPerformed close(); final DiscoveryDialog discDialog = DiscoveryDialog.getDiscoveryDialogInstance(); - discDialog.cancelSearch(); discDialog.setVisible(true); discDialog.validateDialog(); }//GEN-LAST:event_newSearchButtonActionPerformed diff --git a/Core/src/org/sleuthkit/autopsy/discovery/ui/PageWorker.java b/Core/src/org/sleuthkit/autopsy/discovery/ui/PageWorker.java index 491e618683..8718f60a74 100644 --- a/Core/src/org/sleuthkit/autopsy/discovery/ui/PageWorker.java +++ b/Core/src/org/sleuthkit/autopsy/discovery/ui/PageWorker.java @@ -1,7 +1,7 @@ /* * Autopsy * - * Copyright 2019 Basis Technology Corp. + * Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -36,6 +36,8 @@ import org.sleuthkit.autopsy.discovery.search.DiscoveryException; import org.sleuthkit.autopsy.discovery.search.DomainSearch; import org.sleuthkit.autopsy.discovery.search.ResultsSorter; import org.sleuthkit.autopsy.discovery.search.Result; +import org.sleuthkit.autopsy.discovery.search.SearchCancellationException; +import org.sleuthkit.autopsy.discovery.search.SearchContext; /** * SwingWorker to retrieve the contents of a page. @@ -87,7 +89,7 @@ final class PageWorker extends SwingWorker { @Override protected Void doInBackground() throws Exception { - + SearchContext context = new SwingWorkerSearchContext(this); try { // Run the search if (resultType == SearchData.Type.DOMAIN) { @@ -96,17 +98,22 @@ final class PageWorker extends SwingWorker { groupingAttribute, groupSort, fileSortMethod, groupKey, startingEntry, pageSize, - Case.getCurrentCase().getSleuthkitCase(), centralRepo)); + Case.getCurrentCase().getSleuthkitCase(), centralRepo, context)); } else { results.addAll(FileSearch.getFilesInGroup(System.getProperty(USER_NAME_PROPERTY), searchfilters, groupingAttribute, groupSort, fileSortMethod, groupKey, startingEntry, pageSize, - Case.getCurrentCase().getSleuthkitCase(), centralRepo)); + Case.getCurrentCase().getSleuthkitCase(), centralRepo, context)); } } catch (DiscoveryException ex) { logger.log(Level.SEVERE, "Error running file search test", ex); cancel(true); + } catch (SearchCancellationException ex) { + //The user does not explicitly have a way to cancel the loading of a page + //but they could have cancelled the search during the loading of the first page + //So this may or may not be an issue depending on when this occurred. + logger.log(Level.WARNING, "Search was cancelled while retrieving data for results page with starting entry: " + startingEntry, ex); } return null; } diff --git a/Core/src/org/sleuthkit/autopsy/discovery/ui/SearchWorker.java b/Core/src/org/sleuthkit/autopsy/discovery/ui/SearchWorker.java index 7c6863ce62..6ba7a75fd2 100644 --- a/Core/src/org/sleuthkit/autopsy/discovery/ui/SearchWorker.java +++ b/Core/src/org/sleuthkit/autopsy/discovery/ui/SearchWorker.java @@ -1,7 +1,7 @@ /* * Autopsy * - * Copyright 2019 Basis Technology Corp. + * Copyright 2019-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -35,6 +35,8 @@ import org.sleuthkit.autopsy.discovery.search.FileSearch; import org.sleuthkit.autopsy.discovery.search.DiscoveryException; import org.sleuthkit.autopsy.discovery.search.DomainSearch; import org.sleuthkit.autopsy.discovery.search.ResultsSorter; +import org.sleuthkit.autopsy.discovery.search.SearchCancellationException; +import org.sleuthkit.autopsy.discovery.search.SearchContext; import org.sleuthkit.autopsy.discovery.search.SearchData; /** @@ -75,23 +77,28 @@ final class SearchWorker extends SwingWorker { protected Void doInBackground() throws Exception { try { // Run the search + SearchContext context = new SwingWorkerSearchContext(this); if (searchType == SearchData.Type.DOMAIN) { DomainSearch domainSearch = new DomainSearch(); results.putAll(domainSearch.getGroupSizes(System.getProperty(USER_NAME_PROPERTY), filters, groupingAttr, groupSortAlgorithm, fileSort, - Case.getCurrentCase().getSleuthkitCase(), centralRepoDb)); + Case.getCurrentCase().getSleuthkitCase(), centralRepoDb, context)); } else { results.putAll(FileSearch.getGroupSizes(System.getProperty(USER_NAME_PROPERTY), filters, groupingAttr, groupSortAlgorithm, fileSort, - Case.getCurrentCase().getSleuthkitCase(), centralRepoDb)); + Case.getCurrentCase().getSleuthkitCase(), centralRepoDb, context)); } } catch (DiscoveryException ex) { - logger.log(Level.SEVERE, "Error running file search test", ex); + logger.log(Level.SEVERE, "Error running file search test.", ex); cancel(true); + } catch (SearchCancellationException ex) { + //a search cancellation exception indicates that the user chose to cancel this search, + //so it is not an error, but it is worth logging what was being done when the search was cancelled + logger.log(Level.INFO, "Discovery search was cancelled.", ex); } return null; } diff --git a/Core/src/org/sleuthkit/autopsy/discovery/ui/SwingWorkerSearchContext.java b/Core/src/org/sleuthkit/autopsy/discovery/ui/SwingWorkerSearchContext.java new file mode 100644 index 0000000000..2d51d755f9 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/discovery/ui/SwingWorkerSearchContext.java @@ -0,0 +1,45 @@ +/* + * Autopsy + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.discovery.ui; + +import javax.swing.SwingWorker; +import org.sleuthkit.autopsy.discovery.search.SearchContext; + +/** + * Implementation of SearchContext for searches being performed in the + * background thread of a SwingWorker. + */ +class SwingWorkerSearchContext implements SearchContext { + + private final SwingWorker searchWorker; + + /** + * Construct a new SwingWorkerSearchContext. + * + * @param worker The SwingWorker the search is being performed in.
diff --git a/Core/src/org/sleuthkit/autopsy/discovery/ui/SwingWorkerSearchContext.java b/Core/src/org/sleuthkit/autopsy/discovery/ui/SwingWorkerSearchContext.java
new file mode 100644
index 0000000000..2d51d755f9
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/discovery/ui/SwingWorkerSearchContext.java
@@ -0,0 +1,45 @@
+/*
+ * Autopsy
+ *
+ * Copyright 2021 Basis Technology Corp.
+ * Contact: carrier sleuthkit org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.discovery.ui;
+
+import javax.swing.SwingWorker;
+import org.sleuthkit.autopsy.discovery.search.SearchContext;
+
+/**
+ * Implementation of SearchContext for searches being performed in the
+ * background thread of a SwingWorker.
+ */
+class SwingWorkerSearchContext implements SearchContext {
+
+    private final SwingWorker searchWorker;
+
+    /**
+     * Construct a new SwingWorkerSearchContext.
+     *
+     * @param worker The SwingWorker the search is being performed in.
+     */
+    SwingWorkerSearchContext(SwingWorker worker) {
+        searchWorker = worker;
+    }
+
+    @Override
+    public boolean searchIsCancelled() {
+        return searchWorker.isCancelled();
+    }
+}
diff --git a/Core/test/unit/src/org/sleuthkit/autopsy/discovery/search/DomainSearchCacheLoaderTest.java b/Core/test/unit/src/org/sleuthkit/autopsy/discovery/search/DomainSearchCacheLoaderTest.java
index 66a77cf617..b9eb3aeefc 100755
--- a/Core/test/unit/src/org/sleuthkit/autopsy/discovery/search/DomainSearchCacheLoaderTest.java
+++ b/Core/test/unit/src/org/sleuthkit/autopsy/discovery/search/DomainSearchCacheLoaderTest.java
@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2020 Basis Technology Corp.
+ * Copyright 2020-2021 Basis Technology Corp.
 * Contact: carrier sleuthkit org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
@@ -52,7 +52,7 @@
                 new DiscoveryAttributes.DataSourceAttribute(),
                 Group.GroupSortingAlgorithm.BY_GROUP_NAME,
                 ResultsSorter.SortingMethod.BY_DOMAIN_NAME,
-                caseDb, null);
+                caseDb, null, new TestSearchContextImpl(false));
         DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class);
         when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains);
@@ -88,7 +88,7 @@
                 new DiscoveryAttributes.NoGroupingAttribute(),
                 Group.GroupSortingAlgorithm.BY_GROUP_NAME,
                 ResultsSorter.SortingMethod.BY_DOMAIN_NAME,
-                caseDb, null);
+                caseDb, null, new TestSearchContextImpl(false));
         DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class);
         when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains);
@@ -121,7 +121,7 @@
                 new DiscoveryAttributes.NoGroupingAttribute(),
                 Group.GroupSortingAlgorithm.BY_GROUP_NAME,
                 ResultsSorter.SortingMethod.BY_DATA_SOURCE,
-                caseDb, null);
+                caseDb, null, new TestSearchContextImpl(false));
         DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class);
         when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains);
@@ -155,7 +155,7 @@
                 new DiscoveryAttributes.DataSourceAttribute(),
                 Group.GroupSortingAlgorithm.BY_GROUP_SIZE,
                 ResultsSorter.SortingMethod.BY_DOMAIN_NAME,
-                caseDb, null);
+                caseDb, null, new TestSearchContextImpl(false));
         DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class);
         when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains);
@@ -173,4 +173,5 @@
             }
         }
     }
+
 }
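
In the DomainSearchTest changes below, the cache stubbings switch from raw values to Mockito argument matchers. This is forced by the new parameter: once any(SearchContext.class) is used for one argument, Mockito requires every argument of that call to be a matcher, otherwise the stubbing fails with an InvalidUseOfMatchersException. The excerpt below (the accepted form is taken verbatim from the hunks that follow; the rejected form is shown for contrast only) illustrates the rule:

    // Rejected by Mockito: raw values mixed with the any(SearchContext.class) matcher.
    when(cache.get(null, new ArrayList<>(), null, null, null, null, null, any(SearchContext.class))).thenReturn(dummyData);

    // Accepted: every argument is expressed as a matcher (isNull(), eq(), any()).
    when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
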
diff --git a/Core/test/unit/src/org/sleuthkit/autopsy/discovery/search/DomainSearchTest.java b/Core/test/unit/src/org/sleuthkit/autopsy/discovery/search/DomainSearchTest.java
index 7dcffed663..2037bbf8b3 100755
--- a/Core/test/unit/src/org/sleuthkit/autopsy/discovery/search/DomainSearchTest.java
+++ b/Core/test/unit/src/org/sleuthkit/autopsy/discovery/search/DomainSearchTest.java
@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2020 Basis Technology Corp.
+ * Copyright 2020-2021 Basis Technology Corp.
 * Contact: carrier sleuthkit org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
@@ -24,7 +24,6 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import org.junit.Test;
-
 import static org.mockito.Mockito.*;
 import static org.junit.Assert.*;
 import org.sleuthkit.autopsy.discovery.search.DiscoveryKeyUtils.GroupKey;
@@ -46,11 +45,11 @@
                 );
             }
         };
-        when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
+        when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
         DomainSearch domainSearch = new DomainSearch(cache, null, null);
-        Map sizes = domainSearch.getGroupSizes(null,
-                new ArrayList<>(), null, null, null, null, null);
+        Map sizes = domainSearch.getGroupSizes(null,
+                new ArrayList<>(), null, null, null, null, null, new TestSearchContextImpl(false));
         assertEquals(4, sizes.get(groupOne).longValue());
     }
@@ -81,11 +80,11 @@
             }
         };
-        when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
+        when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
         DomainSearch domainSearch = new DomainSearch(cache, null, null);
-        Map sizes = domainSearch.getGroupSizes(null,
-                new ArrayList<>(), null, null, null, null, null);
+        Map sizes = domainSearch.getGroupSizes(null,
+                new ArrayList<>(), null, null, null, null, null, new TestSearchContextImpl(false));
         assertEquals(4, sizes.get(groupOne).longValue());
         assertEquals(3, sizes.get(groupTwo).longValue());
         assertEquals(1, sizes.get(groupThree).longValue());
@@ -95,11 +94,11 @@
     public void groupSizes_EmptyGroup_ShouldBeSizeZero() throws DiscoveryException {
         DomainSearchCache cache = mock(DomainSearchCache.class);
-        when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(new HashMap<>());
+        when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(new HashMap<>());
         DomainSearch domainSearch = new DomainSearch(cache, null, null);
-        Map sizes = domainSearch.getGroupSizes(null,
-                new ArrayList<>(), null, null, null, null, null);
+        Map sizes = domainSearch.getGroupSizes(null,
+                new ArrayList<>(), null, null, null, null, null, new TestSearchContextImpl(false));
         assertEquals(0, sizes.size());
     }
@@ -120,17 +119,17 @@
             }
         };
-        when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
+        when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
         DomainSearch domainSearch = new DomainSearch(cache, null, null);
-        List firstPage = domainSearch.getDomainsInGroup(null,
-                new ArrayList<>(), null, null, null, groupOne, 0, 3, null, null);
+        List firstPage = domainSearch.getDomainsInGroup(null,
+                new ArrayList<>(), null, null, null, groupOne, 0, 3, null, null, new TestSearchContextImpl(false));
         assertEquals(3, firstPage.size());
         for (int i = 0; i < firstPage.size(); i++) {
             assertEquals(domains.get(i), firstPage.get(i));
         }
     }
-    
+
     @Test
     public void getDomains_SingleGroupOverSizedPage_ShouldContainAllDomains() throws DiscoveryException {
         DomainSearchCache cache = mock(DomainSearchCache.class);
@@ -148,17 +147,17 @@
             }
         };
-        when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
+        when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
         DomainSearch domainSearch = new DomainSearch(cache, null, null);
-        List firstPage = domainSearch.getDomainsInGroup(null,
-                new ArrayList<>(), null, null, null, groupOne, 0, 100, null, null);
+        List firstPage = domainSearch.getDomainsInGroup(null,
+                new ArrayList<>(), null, null, null, groupOne, 0, 100, null, null, new TestSearchContextImpl(false));
         assertEquals(4, firstPage.size());
         for (int i = 0; i < firstPage.size(); i++) {
             assertEquals(domains.get(i), firstPage.get(i));
         }
     }
-    
+
     @Test
     public void getDomains_SingleGroupHalfPage_ShouldContainHalfDomains() throws DiscoveryException {
         DomainSearchCache cache = mock(DomainSearchCache.class);
@@ -176,18 +175,18 @@
             }
         };
-        when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
+        when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
         DomainSearch domainSearch = new DomainSearch(cache, null, null);
-        List firstPage = domainSearch.getDomainsInGroup(null,
-                new ArrayList<>(), null, null, null, groupOne, 0, 2, null, null);
+        List firstPage = domainSearch.getDomainsInGroup(null,
+                new ArrayList<>(), null, null, null, groupOne, 0, 2, null, null, new TestSearchContextImpl(false));
         assertEquals(2, firstPage.size());
         for (int i = 0; i < firstPage.size(); i++) {
             assertEquals(domains.get(i), firstPage.get(i));
         }
     }
-    
-    @Test 
+
+    @Test
     public void getDomains_SingleGroupLastPageLastDomain_ShouldContainLastDomain() throws DiscoveryException {
         DomainSearchCache cache = mock(DomainSearchCache.class);
@@ -204,15 +203,15 @@
             }
         };
-        when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
+        when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
         DomainSearch domainSearch = new DomainSearch(cache, null, null);
-        List firstPage = domainSearch.getDomainsInGroup(null,
-                new ArrayList<>(), null, null, null, groupOne, 3, 1, null, null);
+        List firstPage = domainSearch.getDomainsInGroup(null,
+                new ArrayList<>(), null, null, null, groupOne, 3, 1, null, null, new TestSearchContextImpl(false));
         assertEquals(1, firstPage.size());
         assertEquals(domains.get(domains.size() - 1), firstPage.get(0));
     }
-    
+
     @Test
     public void getDomains_SingleGroupOversizedOffset_ShouldContainNoDomains() throws DiscoveryException {
         DomainSearchCache cache = mock(DomainSearchCache.class);
@@ -230,14 +229,14 @@
             }
         };
-        when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
+        when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
         DomainSearch domainSearch = new DomainSearch(cache, null, null);
-        List firstPage = domainSearch.getDomainsInGroup(null,
-                new ArrayList<>(), null, null, null, groupOne, 20, 5, null, null);
+        List firstPage = domainSearch.getDomainsInGroup(null,
+                new ArrayList<>(), null, null, null, groupOne, 20, 5, null, null, new TestSearchContextImpl(false));
         assertEquals(0, firstPage.size());
     }
-    
+
     @Test
     public void getDomains_SingleGroupZeroSizedPage_ShouldContainNoDomains() throws DiscoveryException {
         DomainSearchCache cache = mock(DomainSearchCache.class);
@@ -255,14 +254,14 @@
             }
         };
-        when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
+        when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
         DomainSearch domainSearch = new DomainSearch(cache, null, null);
-        List firstPage = domainSearch.getDomainsInGroup(null,
-                new ArrayList<>(), null, null, null, groupOne, 0, 0, null, null);
+        List firstPage = domainSearch.getDomainsInGroup(null,
+                new ArrayList<>(), null, null, null, groupOne, 0, 0, null, null, new TestSearchContextImpl(false));
         assertEquals(0, firstPage.size());
     }
-    
+
     @Test
     public void getDomains_MultipleGroupsFullPage_ShouldContainAllDomainsInGroup() throws DiscoveryException {
         DomainSearchCache cache = mock(DomainSearchCache.class);
@@ -290,14 +289,14 @@
             }
         };
-        when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
+        when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
         DomainSearch domainSearch = new DomainSearch(cache, null, null);
-        List firstPage = domainSearch.getDomainsInGroup(null,
-                new ArrayList<>(), null, null, null, groupOne, 0, 3, null, null);
+        List firstPage = domainSearch.getDomainsInGroup(null,
+                new ArrayList<>(), null, null, null, groupOne, 0, 3, null, null, new TestSearchContextImpl(false));
         assertEquals(3, firstPage.size());
     }
-    
+
     @Test
     public void getDomains_MultipleGroupsHalfPage_ShouldContainHalfDomainsInGroup() throws DiscoveryException {
         DomainSearchCache cache = mock(DomainSearchCache.class);
@@ -325,17 +324,17 @@
             }
         };
-        when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
+        when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
         DomainSearch domainSearch = new DomainSearch(cache, null, null);
-        List firstPage = domainSearch.getDomainsInGroup(null,
-                new ArrayList<>(), null, null, null, groupTwo, 1, 2, null, null);
+        List firstPage = domainSearch.getDomainsInGroup(null,
+                new ArrayList<>(), null, null, null, groupTwo, 1, 2, null, null, new TestSearchContextImpl(false));
         assertEquals(2, firstPage.size());
         for (int i = 0; i < firstPage.size(); i++) {
             assertEquals(dummyData.get(groupTwo).get(i + 1), firstPage.get(i));
         }
     }
-    
+
     @Test
     public void getDomains_SingleGroupSimulatedPaging_ShouldPageThroughAllDomains() throws DiscoveryException {
         DomainSearchCache cache = mock(DomainSearchCache.class);
@@ -357,20 +356,20 @@
             }
         };
-        when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
+        when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
         DomainSearch domainSearch = new DomainSearch(cache, null, null);
-        
+
         int start = 0;
         int size = 2;
         while (start + size <= domains.size()) {
-            List page = domainSearch.getDomainsInGroup(null,
-                    new ArrayList<>(), null, null, null, groupOne, start, size, null, null);
+            List page = domainSearch.getDomainsInGroup(null,
+                    new ArrayList<>(), null, null, null, groupOne, start, size, null, null, new TestSearchContextImpl(false));
             assertEquals(2, page.size());
-            for(int i = 0; i < page.size(); i++) {
+            for (int i = 0; i < page.size(); i++) {
                 assertEquals(domains.get(start + i), page.get(i));
             }
-            
+
             start += size;
         }
     }
@@ -379,7 +378,7 @@
         private final String name;
-        public DummyKey(String name) {
+        DummyKey(String name) {
             this.name = name;
         }
diff --git a/Core/test/unit/src/org/sleuthkit/autopsy/discovery/search/TestSearchContextImpl.java b/Core/test/unit/src/org/sleuthkit/autopsy/discovery/search/TestSearchContextImpl.java
new file mode 100644
index 0000000000..128038eeed
--- /dev/null
+++ b/Core/test/unit/src/org/sleuthkit/autopsy/discovery/search/TestSearchContextImpl.java
@@ -0,0 +1,37 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2021 Basis Technology Corp.
+ * Contact: carrier sleuthkit org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.discovery.search;
+
+/**
+ * Implementation of SearchContext for testing, to ensure that NPEs are not
+ * thrown and that the context reports the expected cancellation status.
+ */
+public class TestSearchContextImpl implements SearchContext {
+
+    private final boolean isCancelled;
+
+    public TestSearchContextImpl(boolean hasBeenCancelled) {
+        isCancelled = hasBeenCancelled;
+    }
+
+    @Override
+    public boolean searchIsCancelled() {
+        return isCancelled;
+    }
+}
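
A TestSearchContextImpl constructed with true is what would exercise the new cancellation path in a unit test. As a rough illustration (nothing below is part of this change set; the helper method and test class are hypothetical), a test could pass a cancelled context to any method that accepts a SearchContext and assert that SearchCancellationException is thrown:

    package org.sleuthkit.autopsy.discovery.search;

    import org.junit.Test;

    /**
     * Hypothetical test showing the poll-and-throw pattern this change set adds,
     * driven by TestSearchContextImpl; processStep() stands in for any search
     * step that accepts a SearchContext.
     */
    public class CancellationPatternTest {

        private static void processStep(SearchContext context) throws SearchCancellationException {
            if (context.searchIsCancelled()) {
                throw new SearchCancellationException("Search was cancelled before the step could run.");
            }
            // ... the expensive work would happen here ...
        }

        @Test(expected = SearchCancellationException.class)
        public void processStep_CancelledContext_ShouldThrow() throws SearchCancellationException {
            processStep(new TestSearchContextImpl(true));
        }

        @Test
        public void processStep_ActiveContext_ShouldComplete() throws SearchCancellationException {
            processStep(new TestSearchContextImpl(false));
        }
    }
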