Merge branch 'sleuthkit:develop' into develop

This commit is contained in:
Seb2lyon 2021-08-28 12:19:20 +02:00 committed by GitHub
commit 6cde752006
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
43 changed files with 781 additions and 331 deletions

View File

@ -88,26 +88,9 @@ public final class OtherOccurrences {
if (osAccountAddr.isPresent()) { if (osAccountAddr.isPresent()) {
try { try {
for (OsAccountInstance instance : osAccount.getOsAccountInstances()) { for (OsAccountInstance instance : osAccount.getOsAccountInstances()) {
DataSource osAccountDataSource = instance.getDataSource(); CorrelationAttributeInstance correlationAttributeInstance = CorrelationAttributeUtil.makeCorrAttr(instance.getOsAccount(), instance.getDataSource());
try { if (correlationAttributeInstance != null) {
CorrelationCase correlationCase = CentralRepository.getInstance().getCase(Case.getCurrentCaseThrows());
CorrelationAttributeInstance correlationAttributeInstance = new CorrelationAttributeInstance(
CentralRepository.getInstance().getCorrelationTypeById(CorrelationAttributeInstance.OSACCOUNT_TYPE_ID),
osAccountAddr.get(),
correlationCase,
CorrelationDataSource.fromTSKDataSource(correlationCase, instance.getDataSource()),
"",
"",
TskData.FileKnown.KNOWN,
osAccount.getId());
ret.add(correlationAttributeInstance); ret.add(correlationAttributeInstance);
} catch (CentralRepoException ex) {
logger.log(Level.SEVERE, String.format("Cannot get central repository for OsAccount: %s.", osAccountAddr.get()), ex); //NON-NLS
} catch (NoCurrentCaseException ex) {
logger.log(Level.WARNING, String.format("Exception while getting open case looking up osAccount %s.", osAccountAddr.get()), ex); //NON-NLS
} catch (CorrelationAttributeNormalizationException ex) {
logger.log(Level.SEVERE, String.format("Exception with Correlation Attribute Normalization for osAccount %s.", osAccountAddr.get()), ex); //NON-NLS
} }
} }
} catch (TskCoreException ex) { } catch (TskCoreException ex) {

View File

@ -40,6 +40,8 @@ import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataArtifact; import org.sleuthkit.datamodel.DataArtifact;
import org.sleuthkit.datamodel.HashUtility; import org.sleuthkit.datamodel.HashUtility;
import org.sleuthkit.datamodel.InvalidAccountIDException; import org.sleuthkit.datamodel.InvalidAccountIDException;
import org.sleuthkit.datamodel.OsAccount;
import org.sleuthkit.datamodel.OsAccountInstance;
import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData; import org.sleuthkit.datamodel.TskData;
@ -568,6 +570,51 @@ public class CorrelationAttributeUtil {
} }
} }
/**
* Makes an OS-account-type correlation attribute instance from an OS
* account. Returns null when the account address is absent or is one of
* the well-known SIDs that exist on every Windows system and are
* therefore not unique enough to correlate on.
*
* @param osAccount The OS account.
* @param dataSource The data source content object.
*
* @return The correlation attribute instance, or null if the address is
* missing/non-unique or an error occurred.
*/
public static CorrelationAttributeInstance makeCorrAttr(OsAccount osAccount, Content dataSource) {
Optional<String> accountAddr = osAccount.getAddr();
// Ignore a null address or one of the well-known SIDs below, since they
// are present on every Windows system and are not unique.
if (!accountAddr.isPresent() || accountAddr.get().equals("S-1-5-18") || accountAddr.get().equals("S-1-5-19") || accountAddr.get().equals("S-1-5-20")) {
return null;
}
try {
CorrelationCase correlationCase = CentralRepository.getInstance().getCase(Case.getCurrentCaseThrows());
CorrelationAttributeInstance correlationAttributeInstance = new CorrelationAttributeInstance(
CentralRepository.getInstance().getCorrelationTypeById(CorrelationAttributeInstance.OSACCOUNT_TYPE_ID),
accountAddr.get(),
correlationCase,
CorrelationDataSource.fromTSKDataSource(correlationCase, dataSource),
"",
"",
TskData.FileKnown.KNOWN,
osAccount.getId());
return correlationAttributeInstance;
} catch (CentralRepoException ex) {
logger.log(Level.SEVERE, String.format("Cannot get central repository for OsAccount: %s.", accountAddr.get()), ex); //NON-NLS
return null;
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
return null;
} catch (CorrelationAttributeNormalizationException ex) {
logger.log(Level.SEVERE, "Exception with Correlation Attribute Normalization.", ex); //NON-NLS
return null;
}
}
/** /**
* Gets the correlation attribute instance for a file. * Gets the correlation attribute instance for a file.
* *

View File

@ -661,7 +661,8 @@ public final class CaseEventListener implements PropertyChangeListener {
"CaseEventsListener.prevCaseComment.text=Users seen in previous cases", "CaseEventsListener.prevCaseComment.text=Users seen in previous cases",
"CaseEventsListener.prevExists.text=Previously Seen Users (Central Repository)"}) "CaseEventsListener.prevExists.text=Previously Seen Users (Central Repository)"})
/** /**
* Add OsAccount Instance to CR and find interesting items based on the OsAccount * Add OsAccount Instance to CR and find interesting items based on the
* OsAccount
*/ */
private final class OsAccountInstancesAddedTask implements Runnable { private final class OsAccountInstancesAddedTask implements Runnable {
@ -687,27 +688,15 @@ public final class CaseEventListener implements PropertyChangeListener {
for (OsAccountInstance osAccountInstance : addedOsAccountNew) { for (OsAccountInstance osAccountInstance : addedOsAccountNew) {
try { try {
OsAccount osAccount = osAccountInstance.getOsAccount(); OsAccount osAccount = osAccountInstance.getOsAccount();
Optional<String> accountAddr = osAccount.getAddr(); CorrelationAttributeInstance correlationAttributeInstance = CorrelationAttributeUtil.makeCorrAttr(osAccount, osAccountInstance.getDataSource());
// Check address if it is null or one of the ones below we want to ignore it since they will always be one a windows system if (correlationAttributeInstance == null) {
// and they are not unique
if (!accountAddr.isPresent() || accountAddr.get().equals("S-1-5-18") || accountAddr.get().equals("S-1-5-19") || accountAddr.get().equals("S-1-5-20")) {
return; return;
} }
Optional<String> accountAddr = osAccount.getAddr();
try { try {
CorrelationCase correlationCase = CentralRepository.getInstance().getCase(Case.getCurrentCaseThrows());
CorrelationAttributeInstance correlationAttributeInstance = new CorrelationAttributeInstance(
CentralRepository.getInstance().getCorrelationTypeById(CorrelationAttributeInstance.OSACCOUNT_TYPE_ID),
accountAddr.get(),
correlationCase,
CorrelationDataSource.fromTSKDataSource(correlationCase, osAccountInstance.getDataSource()),
"",
"",
TskData.FileKnown.KNOWN,
osAccount.getId());
// Save to the database if requested // Save to the database if requested
if(IngestEventsListener.shouldCreateCrProperties()) { if (IngestEventsListener.shouldCreateCrProperties()) {
dbManager.addArtifactInstance(correlationAttributeInstance); dbManager.addArtifactInstance(correlationAttributeInstance);
} }
@ -740,14 +729,11 @@ public final class CaseEventListener implements PropertyChangeListener {
} }
} }
} catch (CentralRepoException ex) {
LOGGER.log(Level.SEVERE, String.format("Cannot get central repository for OsAccount: %s.", accountAddr.get()), ex); //NON-NLS
} catch (NoCurrentCaseException ex) {
LOGGER.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
} catch (CorrelationAttributeNormalizationException ex) { } catch (CorrelationAttributeNormalizationException ex) {
LOGGER.log(Level.SEVERE, "Exception with Correlation Attribute Normalization.", ex); //NON-NLS LOGGER.log(Level.SEVERE, "Exception with Correlation Attribute Normalization.", ex); //NON-NLS
} catch (CentralRepoException ex) {
LOGGER.log(Level.SEVERE, String.format("Cannot get central repository for OsAccount: %s.", accountAddr.get()), ex); //NON-NLS
} }
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "Cannot get central repository for OsAccount: " + "OsAccount", ex); LOGGER.log(Level.SEVERE, "Cannot get central repository for OsAccount: " + "OsAccount", ex);
} }

View File

@ -40,6 +40,7 @@ import org.sleuthkit.datamodel.TskCoreException;
*/ */
@ServiceProvider(service = DataContentViewer.class, position = 7) @ServiceProvider(service = DataContentViewer.class, position = 7)
public class AnalysisResultsContentViewer implements DataContentViewer { public class AnalysisResultsContentViewer implements DataContentViewer {
private static final Logger logger = Logger.getLogger(AnalysisResultsContentPanel.class.getName()); private static final Logger logger = Logger.getLogger(AnalysisResultsContentPanel.class.getName());
// isPreferred value // isPreferred value
@ -50,8 +51,6 @@ public class AnalysisResultsContentViewer implements DataContentViewer {
private SwingWorker<?, ?> worker = null; private SwingWorker<?, ?> worker = null;
@NbBundle.Messages({ @NbBundle.Messages({
"AnalysisResultsContentViewer_title=Analysis Results" "AnalysisResultsContentViewer_title=Analysis Results"
}) })

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2012-2019 Basis Technology Corp. * Copyright 2012-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -110,7 +110,7 @@ public class ImageNode extends AbstractContentNode<Image> {
actionsList.add(a); actionsList.add(a);
} }
actionsList.addAll(ExplorerNodeActionVisitor.getActions(content)); actionsList.addAll(ExplorerNodeActionVisitor.getActions(content));
actionsList.add(new FileSearchAction(Bundle.ImageNode_getActions_openFileSearchByAttr_text())); actionsList.add(new FileSearchAction(Bundle.ImageNode_getActions_openFileSearchByAttr_text(), content.getId()));
actionsList.add(new ViewSummaryInformationAction(content.getId())); actionsList.add(new ViewSummaryInformationAction(content.getId()));
actionsList.add(new RunIngestModulesAction(Collections.<Content>singletonList(content))); actionsList.add(new RunIngestModulesAction(Collections.<Content>singletonList(content)));
actionsList.add(new NewWindowViewAction(NbBundle.getMessage(this.getClass(), "ImageNode.getActions.viewInNewWin.text"), this)); actionsList.add(new NewWindowViewAction(NbBundle.getMessage(this.getClass(), "ImageNode.getActions.viewInNewWin.text"), this));

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2017-2019 Basis Technology Corp. * Copyright 2017-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -65,7 +65,7 @@ public abstract class SpecialDirectoryNode extends AbstractAbstractFileNode<Spec
actions.add(ExtractAction.getInstance()); actions.add(ExtractAction.getInstance());
actions.add(ExportCSVAction.getInstance()); actions.add(ExportCSVAction.getInstance());
actions.add(null); // creates a menu separator actions.add(null); // creates a menu separator
actions.add(new FileSearchAction(Bundle.ImageNode_getActions_openFileSearchByAttr_text())); actions.add(new FileSearchAction(Bundle.ImageNode_getActions_openFileSearchByAttr_text(), content.getId()));
if (content.isDataSource()) { if (content.isDataSource()) {
actions.add(new ViewSummaryInformationAction(content.getId())); actions.add(new ViewSummaryInformationAction(content.getId()));
actions.add(new RunIngestModulesAction(Collections.<Content>singletonList(content))); actions.add(new RunIngestModulesAction(Collections.<Content>singletonList(content)));

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2011 Basis Technology Corp. * Copyright 2011-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -28,14 +28,26 @@ import org.openide.util.Lookup;
*/ */
public class FileSearchAction extends AbstractAction { public class FileSearchAction extends AbstractAction {
private final Long dataSourceId;
public FileSearchAction(String title, long dataSourceID) {
super(title);
dataSourceId = dataSourceID;
}
public FileSearchAction(String title) { public FileSearchAction(String title) {
super(title); super(title);
dataSourceId = null;
} }
@Override @Override
public void actionPerformed(ActionEvent e) { public void actionPerformed(ActionEvent e) {
FileSearchProvider searcher = Lookup.getDefault().lookup(FileSearchProvider.class); FileSearchProvider searcher = Lookup.getDefault().lookup(FileSearchProvider.class);
if (dataSourceId == null) {
searcher.showDialog(); searcher.showDialog();
} else {
searcher.showDialog(dataSourceId);
}
} }
} }

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2011 Basis Technology Corp. * Copyright 2011-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -23,5 +23,8 @@ package org.sleuthkit.autopsy.directorytree;
*/ */
public interface FileSearchProvider { public interface FileSearchProvider {
public void showDialog(Long dataSourceID);
@Deprecated
public void showDialog(); public void showDialog();
} }

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy * Autopsy
* *
* Copyright 2020 Basis Technology Corp. * Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -54,14 +54,18 @@ public abstract class AbstractFilter {
* @param caseDb The case database * @param caseDb The case database
* @param centralRepoDb The central repo database. Can be null if the * @param centralRepoDb The central repo database. Can be null if the
* filter does not require it. * filter does not require it.
* @param context The SearchContext the search which is applying this
* filter is being performed from.
* *
* @return The list of results that match this filter (and any that came * @return The list of results that match this filter (and any that came
* before it) * before it)
* *
* @throws DiscoveryException * @throws DiscoveryException
* @throws SearchCancellationException Thrown when the user has cancelled
* the search.
*/ */
public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb, public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException { CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
return new ArrayList<>(); return new ArrayList<>();
} }

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy * Autopsy
* *
* Copyright 2020 Basis Technology Corp. * Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -78,10 +78,14 @@ public class DiscoveryAttributes {
* @param caseDb The case database. * @param caseDb The case database.
* @param centralRepoDb The central repository database. Can be null if * @param centralRepoDb The central repository database. Can be null if
* not needed. * not needed.
* @param context The SearchContext the search which is applying
* this filter is being performed from.
* *
* @throws DiscoveryException * @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has
* cancelled the search.
*/ */
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Default is to do nothing // Default is to do nothing
} }
} }
@ -154,10 +158,13 @@ public class DiscoveryAttributes {
@Override @Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb, public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException { CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
try { try {
Map<String, Set<String>> domainsToCategories = getDomainsWithWebCategories(caseDb); Map<String, Set<String>> domainsToCategories = getDomainsWithWebCategories(caseDb, context);
for (Result result : results) { for (Result result : results) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Domain Category Attribute was being added.");
}
if (result instanceof ResultDomain) { if (result instanceof ResultDomain) {
ResultDomain domain = (ResultDomain) result; ResultDomain domain = (ResultDomain) result;
domain.addWebCategories(domainsToCategories.get(domain.getDomain())); domain.addWebCategories(domainsToCategories.get(domain.getDomain()));
@ -172,14 +179,29 @@ public class DiscoveryAttributes {
* Loads all TSK_WEB_CATEGORY artifacts and maps the domain attribute to * Loads all TSK_WEB_CATEGORY artifacts and maps the domain attribute to
* the category name attribute. Each ResultDomain is then parsed and * the category name attribute. Each ResultDomain is then parsed and
* matched against this map of values. * matched against this map of values.
*
* @param caseDb The case database.
* @param context The SearchContext the search which is applying this
* filter is being performed from.
*
* @return domainToCategory - A map of the domain names to the category
* name attribute they are classified as.
*
* @throws TskCoreException
* @throws InterruptedException
* @throws SearchCancellationException - Thrown when the user has
* cancelled the search.
*/ */
private Map<String, Set<String>> getDomainsWithWebCategories(SleuthkitCase caseDb) throws TskCoreException, InterruptedException { private Map<String, Set<String>> getDomainsWithWebCategories(SleuthkitCase caseDb, SearchContext context) throws TskCoreException, InterruptedException, SearchCancellationException {
Map<String, Set<String>> domainToCategory = new HashMap<>(); Map<String, Set<String>> domainToCategory = new HashMap<>();
for (BlackboardArtifact artifact : caseDb.getBlackboardArtifacts(TSK_WEB_CATEGORIZATION)) { for (BlackboardArtifact artifact : caseDb.getBlackboardArtifacts(TSK_WEB_CATEGORIZATION)) {
if (Thread.currentThread().isInterrupted()) { if (Thread.currentThread().isInterrupted()) {
throw new InterruptedException(); throw new InterruptedException();
} }
if (context.searchIsCancelled()) {
throw new SearchCancellationException("Search was cancelled while getting domains for artifact type: " + artifact.getDisplayName());
}
BlackboardAttribute webCategory = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME)); BlackboardAttribute webCategory = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME));
BlackboardAttribute domain = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN)); BlackboardAttribute domain = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN));
if (webCategory != null && domain != null) { if (webCategory != null && domain != null) {
@ -206,14 +228,16 @@ public class DiscoveryAttributes {
@Override @Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb, public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException { CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Get pairs of (object ID, keyword list name) for all files in the list of files that have // Get pairs of (object ID, keyword list name) for all files in the list of files that have
// keyword list hits. // keyword list hits.
String selectQuery = createSetNameClause(results, BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID(), String selectQuery = createSetNameClause(results, BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID(),
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID()); BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID());
SetKeywordListNamesCallback callback = new SetKeywordListNamesCallback(results); SetKeywordListNamesCallback callback = new SetKeywordListNamesCallback(results);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Keyword List Attribute was being added.");
}
try { try {
caseDb.getCaseDbAccessManager().select(selectQuery, callback); caseDb.getCaseDbAccessManager().select(selectQuery, callback);
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
@ -278,8 +302,20 @@ public class DiscoveryAttributes {
* Example: query for notable status of google.com. Result: notable With * Example: query for notable status of google.com. Result: notable With
* this map, all domain instances that represent google.com can be updated * this map, all domain instances that represent google.com can be updated
* after one simple lookup. * after one simple lookup.
*
* @param domainsBatch The list of ResultDomains to organize.
* @param attributeType The type of correlation attribute being organized.
* @param context The SearchContext the search which is applying this
* filter is being performed from.
*
* @return resultDomainTable - A map of the normalized domain name to the
* list of ResultDomain objects which are part of that normalized
* domain.
*
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/ */
private static Map<String, List<ResultDomain>> organizeByValue(List<ResultDomain> domainsBatch, CorrelationAttributeInstance.Type attributeType) { private static Map<String, List<ResultDomain>> organizeByValue(List<ResultDomain> domainsBatch, CorrelationAttributeInstance.Type attributeType, SearchContext context) throws SearchCancellationException {
final Map<String, List<ResultDomain>> resultDomainTable = new HashMap<>(); final Map<String, List<ResultDomain>> resultDomainTable = new HashMap<>();
for (ResultDomain domainInstance : domainsBatch) { for (ResultDomain domainInstance : domainsBatch) {
try { try {
@ -288,6 +324,9 @@ public class DiscoveryAttributes {
final List<ResultDomain> bucket = resultDomainTable.getOrDefault(normalizedDomain, new ArrayList<>()); final List<ResultDomain> bucket = resultDomainTable.getOrDefault(normalizedDomain, new ArrayList<>());
bucket.add(domainInstance); bucket.add(domainInstance);
resultDomainTable.put(normalizedDomain, bucket); resultDomainTable.put(normalizedDomain, bucket);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("Search was cancelled while orgainizing domains by their normalized value.");
}
} catch (CorrelationAttributeNormalizationException ex) { } catch (CorrelationAttributeNormalizationException ex) {
logger.log(Level.INFO, String.format("Domain [%s] failed normalization, skipping...", domainInstance.getDomain())); logger.log(Level.INFO, String.format("Domain [%s] failed normalization, skipping...", domainInstance.getDomain()));
} }
@ -322,39 +361,73 @@ public class DiscoveryAttributes {
@Override @Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb, public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException { CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
if (centralRepoDb != null) { if (centralRepoDb != null) {
processFilesWithCr(results, centralRepoDb); processFilesWithCr(results, centralRepoDb, context);
} }
} }
private void processFilesWithCr(List<Result> results, CentralRepository centralRepo) throws DiscoveryException { /**
* Helper method to batch the domain results and check for notability.
*
* @param results The results which are being checked for previously
* being notable in the CR.
* @param centralRepo The central repository being used to check for
* notability.
* @param context The SearchContext the search which is applying
* this filter is being performed from.
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has
* cancelled the search.
*/
private void processFilesWithCr(List<Result> results, CentralRepository centralRepo, SearchContext context) throws DiscoveryException, SearchCancellationException {
List<ResultDomain> domainsBatch = new ArrayList<>(); List<ResultDomain> domainsBatch = new ArrayList<>();
for (Result result : results) { for (Result result : results) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Previously Notable attribute was being calculated with the CR.");
}
if (result.getType() == SearchData.Type.DOMAIN) { if (result.getType() == SearchData.Type.DOMAIN) {
domainsBatch.add((ResultDomain) result); domainsBatch.add((ResultDomain) result);
if (domainsBatch.size() == DOMAIN_BATCH_SIZE) { if (domainsBatch.size() == DOMAIN_BATCH_SIZE) {
queryPreviouslyNotable(domainsBatch, centralRepo); queryPreviouslyNotable(domainsBatch, centralRepo, context);
domainsBatch.clear(); domainsBatch.clear();
} }
} }
} }
queryPreviouslyNotable(domainsBatch, centralRepo); queryPreviouslyNotable(domainsBatch, centralRepo, context);
} }
private void queryPreviouslyNotable(List<ResultDomain> domainsBatch, CentralRepository centralRepo) throws DiscoveryException { /**
* Helper method to check a batch of domains for notability.
*
*
* @param domainsBatch The list of ResultDomains to check for
* notability.
* @param centralRepo The central repository being used to check for
* notability.
* @param context The SearchContext the search which is applying
* this filter is being performed from.
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has
* cancelled the search.
*/
private void queryPreviouslyNotable(List<ResultDomain> domainsBatch, CentralRepository centralRepo, SearchContext context) throws DiscoveryException, SearchCancellationException {
if (domainsBatch.isEmpty()) { if (domainsBatch.isEmpty()) {
return; return;
} }
try { try {
final CorrelationAttributeInstance.Type attributeType = centralRepo.getCorrelationTypeById(CorrelationAttributeInstance.DOMAIN_TYPE_ID); final CorrelationAttributeInstance.Type attributeType = centralRepo.getCorrelationTypeById(CorrelationAttributeInstance.DOMAIN_TYPE_ID);
final Map<String, List<ResultDomain>> resultDomainTable = organizeByValue(domainsBatch, attributeType); final Map<String, List<ResultDomain>> resultDomainTable = organizeByValue(domainsBatch, attributeType, context);
final String values = createCSV(resultDomainTable.keySet()); final String values = createCSV(resultDomainTable.keySet());
if (context.searchIsCancelled()) {
throw new SearchCancellationException("Search was cancelled while checking for previously notable domains.");
}
final String tableName = CentralRepoDbUtil.correlationTypeToInstanceTableName(attributeType); final String tableName = CentralRepoDbUtil.correlationTypeToInstanceTableName(attributeType);
final String domainFrequencyQuery = " value AS domain_name " final String domainFrequencyQuery = " value AS domain_name "
+ "FROM " + tableName + " " + "FROM " + tableName + " "
@ -421,7 +494,7 @@ public class DiscoveryAttributes {
@Override @Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb, public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException { CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
if (centralRepoDb == null) { if (centralRepoDb == null) {
for (Result result : results) { for (Result result : results) {
if (result.getFrequency() == SearchData.Frequency.UNKNOWN && result.getKnown() == TskData.FileKnown.KNOWN) { if (result.getFrequency() == SearchData.Frequency.UNKNOWN && result.getKnown() == TskData.FileKnown.KNOWN) {
@ -429,7 +502,7 @@ public class DiscoveryAttributes {
} }
} }
} else { } else {
processResultFilesForCR(results, centralRepoDb); processResultFilesForCR(results, centralRepoDb, context);
} }
} }
@ -437,16 +510,26 @@ public class DiscoveryAttributes {
* Private helper method for adding Frequency attribute when CR is * Private helper method for adding Frequency attribute when CR is
* enabled. * enabled.
* *
* @param files The list of ResultFiles to caluclate frequency * @param results The results which are having their frequency
* for. * checked.
* @param centralRepoDb The central repository currently in use. * @param centralRepoDb The central repository being used to check
* frequency.
* @param context The SearchContext the search which is applying
* this filter is being performed from.
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has
* cancelled the search.
*/ */
private void processResultFilesForCR(List<Result> results, private void processResultFilesForCR(List<Result> results,
CentralRepository centralRepoDb) throws DiscoveryException { CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
List<ResultFile> currentFiles = new ArrayList<>(); List<ResultFile> currentFiles = new ArrayList<>();
Set<String> hashesToLookUp = new HashSet<>(); Set<String> hashesToLookUp = new HashSet<>();
List<ResultDomain> domainsToQuery = new ArrayList<>(); List<ResultDomain> domainsToQuery = new ArrayList<>();
for (Result result : results) { for (Result result : results) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Frequency attribute was being calculated with the CR.");
}
// If frequency was already calculated, skip... // If frequency was already calculated, skip...
if (result.getFrequency() == SearchData.Frequency.UNKNOWN) { if (result.getFrequency() == SearchData.Frequency.UNKNOWN) {
if (result.getKnown() == TskData.FileKnown.KNOWN) { if (result.getKnown() == TskData.FileKnown.KNOWN) {
@ -462,7 +545,7 @@ public class DiscoveryAttributes {
} }
if (hashesToLookUp.size() >= BATCH_SIZE) { if (hashesToLookUp.size() >= BATCH_SIZE) {
computeFrequency(hashesToLookUp, currentFiles, centralRepoDb); computeFrequency(hashesToLookUp, currentFiles, centralRepoDb, context);
hashesToLookUp.clear(); hashesToLookUp.clear();
currentFiles.clear(); currentFiles.clear();
@ -470,16 +553,15 @@ public class DiscoveryAttributes {
} else { } else {
domainsToQuery.add((ResultDomain) result); domainsToQuery.add((ResultDomain) result);
if (domainsToQuery.size() == DOMAIN_BATCH_SIZE) { if (domainsToQuery.size() == DOMAIN_BATCH_SIZE) {
queryDomainFrequency(domainsToQuery, centralRepoDb); queryDomainFrequency(domainsToQuery, centralRepoDb, context);
domainsToQuery.clear(); domainsToQuery.clear();
} }
} }
} }
} }
queryDomainFrequency(domainsToQuery, centralRepoDb); queryDomainFrequency(domainsToQuery, centralRepoDb, context);
computeFrequency(hashesToLookUp, currentFiles, centralRepoDb); computeFrequency(hashesToLookUp, currentFiles, centralRepoDb, context);
} }
} }
@ -487,17 +569,22 @@ public class DiscoveryAttributes {
* Query to get the frequency of a domain. * Query to get the frequency of a domain.
* *
* @param domainsToQuery List of domains to check the frequency of. * @param domainsToQuery List of domains to check the frequency of.
* @param centralRepository The central repository to query. * @param centralRepository The central repository being used to check
* frequency.
* @param context The SearchContext the search which is applying
* this filter is being performed from.
* *
* @throws DiscoveryException * @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/ */
private static void queryDomainFrequency(List<ResultDomain> domainsToQuery, CentralRepository centralRepository) throws DiscoveryException { private static void queryDomainFrequency(List<ResultDomain> domainsToQuery, CentralRepository centralRepository, SearchContext context) throws DiscoveryException, SearchCancellationException {
if (domainsToQuery.isEmpty()) { if (domainsToQuery.isEmpty()) {
return; return;
} }
try { try {
final CorrelationAttributeInstance.Type attributeType = centralRepository.getCorrelationTypeById(CorrelationAttributeInstance.DOMAIN_TYPE_ID); final CorrelationAttributeInstance.Type attributeType = centralRepository.getCorrelationTypeById(CorrelationAttributeInstance.DOMAIN_TYPE_ID);
final Map<String, List<ResultDomain>> resultDomainTable = organizeByValue(domainsToQuery, attributeType); final Map<String, List<ResultDomain>> resultDomainTable = organizeByValue(domainsToQuery, attributeType, context);
final String values = createCSV(resultDomainTable.keySet()); final String values = createCSV(resultDomainTable.keySet());
final String tableName = CentralRepoDbUtil.correlationTypeToInstanceTableName(attributeType); final String tableName = CentralRepoDbUtil.correlationTypeToInstanceTableName(attributeType);
final String domainFrequencyQuery = " value AS domain_name, COUNT(value) AS frequency FROM" final String domainFrequencyQuery = " value AS domain_name, COUNT(value) AS frequency FROM"
@ -508,8 +595,11 @@ public class DiscoveryAttributes {
+ ")) AS foo GROUP BY value"; + ")) AS foo GROUP BY value";
final DomainFrequencyCallback frequencyCallback = new DomainFrequencyCallback(resultDomainTable); final DomainFrequencyCallback frequencyCallback = new DomainFrequencyCallback(resultDomainTable);
centralRepository.processSelectClause(domainFrequencyQuery, frequencyCallback);
centralRepository.processSelectClause(domainFrequencyQuery, frequencyCallback);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Domain frequency was being queried with the CR.");
}
if (frequencyCallback.getCause() != null) { if (frequencyCallback.getCause() != null) {
throw frequencyCallback.getCause(); throw frequencyCallback.getCause();
} }
@ -620,7 +710,7 @@ public class DiscoveryAttributes {
@Override @Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb, public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException { CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Get pairs of (object ID, hash set name) for all files in the list of files that have // Get pairs of (object ID, hash set name) for all files in the list of files that have
// hash set hits. // hash set hits.
@ -628,6 +718,9 @@ public class DiscoveryAttributes {
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID()); BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID());
HashSetNamesCallback callback = new HashSetNamesCallback(results); HashSetNamesCallback callback = new HashSetNamesCallback(results);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Hash Hit attribute was being added.");
}
try { try {
caseDb.getCaseDbAccessManager().select(selectQuery, callback); caseDb.getCaseDbAccessManager().select(selectQuery, callback);
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
@ -695,7 +788,7 @@ public class DiscoveryAttributes {
@Override @Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb, public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException { CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Get pairs of (object ID, interesting item set name) for all files in the list of files that have // Get pairs of (object ID, interesting item set name) for all files in the list of files that have
// interesting file set hits. // interesting file set hits.
@ -703,6 +796,9 @@ public class DiscoveryAttributes {
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID()); BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID());
InterestingFileSetNamesCallback callback = new InterestingFileSetNamesCallback(results); InterestingFileSetNamesCallback callback = new InterestingFileSetNamesCallback(results);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Interesting Item attribute was being added.");
}
try { try {
caseDb.getCaseDbAccessManager().select(selectQuery, callback); caseDb.getCaseDbAccessManager().select(selectQuery, callback);
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
@ -808,7 +904,7 @@ public class DiscoveryAttributes {
@Override @Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb, public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException { CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Get pairs of (object ID, object type name) for all files in the list of files that have // Get pairs of (object ID, object type name) for all files in the list of files that have
// objects detected // objects detected
@ -816,6 +912,9 @@ public class DiscoveryAttributes {
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION.getTypeID()); BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION.getTypeID());
ObjectDetectedNamesCallback callback = new ObjectDetectedNamesCallback(results); ObjectDetectedNamesCallback callback = new ObjectDetectedNamesCallback(results);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Object Detected attribute was being added.");
}
try { try {
caseDb.getCaseDbAccessManager().select(selectQuery, callback); caseDb.getCaseDbAccessManager().select(selectQuery, callback);
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
@ -884,10 +983,13 @@ public class DiscoveryAttributes {
@Override @Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb, public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException { CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
try { try {
for (Result result : results) { for (Result result : results) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while File Tag attribute was being added.");
}
if (result.getType() == SearchData.Type.DOMAIN) { if (result.getType() == SearchData.Type.DOMAIN) {
return; return;
} }
@ -995,14 +1097,20 @@ public class DiscoveryAttributes {
} }
/** /**
*
* Computes the CR frequency of all the given hashes and updates the list of * Computes the CR frequency of all the given hashes and updates the list of
* files. * files.
* *
* @param hashesToLookUp Hashes to find the frequency of. * @param hashesToLookUp Hashes to find the frequency of.
* @param currentFiles List of files to update with frequencies. * @param currentFiles List of files to update with frequencies.
* @param centralRepoDb The central repository being used. * @param centralRepoDb The central repository being used.
* @param context The SearchContext the search which is applying this
* filter is being performed from.
*
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/ */
private static void computeFrequency(Set<String> hashesToLookUp, List<ResultFile> currentFiles, CentralRepository centralRepoDb) { private static void computeFrequency(Set<String> hashesToLookUp, List<ResultFile> currentFiles, CentralRepository centralRepoDb, SearchContext context) throws SearchCancellationException {
if (hashesToLookUp.isEmpty()) { if (hashesToLookUp.isEmpty()) {
return; return;
@ -1022,7 +1130,9 @@ public class DiscoveryAttributes {
FrequencyCallback callback = new FrequencyCallback(currentFiles); FrequencyCallback callback = new FrequencyCallback(currentFiles);
centralRepoDb.processSelectClause(selectClause, callback); centralRepoDb.processSelectClause(selectClause, callback);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Domain frequency was being queried with the CR.");
}
} catch (CentralRepoException ex) { } catch (CentralRepoException ex) {
logger.log(Level.WARNING, "Error getting frequency counts from Central Repository", ex); // NON-NLS logger.log(Level.WARNING, "Error getting frequency counts from Central Repository", ex); // NON-NLS
} }

View File

@ -59,6 +59,7 @@ public class DiscoveryKeyUtils {
private final List<AbstractFilter> filters; private final List<AbstractFilter> filters;
private final SleuthkitCase sleuthkitCase; private final SleuthkitCase sleuthkitCase;
private final CentralRepository centralRepository; private final CentralRepository centralRepository;
private final SearchContext context;
/** /**
* Construct a new SearchKey with all information that defines a search. * Construct a new SearchKey with all information that defines a search.
@ -70,16 +71,20 @@ public class DiscoveryKeyUtils {
* @param sortingMethod The method to sort the results by. * @param sortingMethod The method to sort the results by.
* @param sleuthkitCase The SleuthkitCase being searched. * @param sleuthkitCase The SleuthkitCase being searched.
* @param centralRepository The Central Repository being searched. * @param centralRepository The Central Repository being searched.
* @param context The SearchContext which reflects the search
* being performed to get results for this
* key.
*/ */
SearchKey(String userName, List<AbstractFilter> filters, SearchKey(String userName, List<AbstractFilter> filters,
DiscoveryAttributes.AttributeType groupAttributeType, DiscoveryAttributes.AttributeType groupAttributeType,
Group.GroupSortingAlgorithm groupSortingType, Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod sortingMethod, ResultsSorter.SortingMethod sortingMethod,
SleuthkitCase sleuthkitCase, CentralRepository centralRepository) { SleuthkitCase sleuthkitCase, CentralRepository centralRepository, SearchContext context) {
this.groupAttributeType = groupAttributeType; this.groupAttributeType = groupAttributeType;
this.groupSortingType = groupSortingType; this.groupSortingType = groupSortingType;
this.sortingMethod = sortingMethod; this.sortingMethod = sortingMethod;
this.filters = filters; this.filters = filters;
this.context = context;
StringBuilder searchStringBuilder = new StringBuilder(); StringBuilder searchStringBuilder = new StringBuilder();
searchStringBuilder.append(userName); searchStringBuilder.append(userName);
@ -93,8 +98,8 @@ public class DiscoveryKeyUtils {
} }
/** /**
* Construct a SearchKey without a SleuthkitCase or CentralRepositry * Construct a SearchKey without a SearchContext, SleuthkitCase or
* instance. * CentralRepositry instance.
* *
* @param userName The name of the user performing the search. * @param userName The name of the user performing the search.
* @param filters The Filters being used for the search. * @param filters The Filters being used for the search.
@ -107,7 +112,8 @@ public class DiscoveryKeyUtils {
Group.GroupSortingAlgorithm groupSortingType, Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod sortingMethod) { ResultsSorter.SortingMethod sortingMethod) {
this(userName, filters, groupAttributeType, groupSortingType, this(userName, filters, groupAttributeType, groupSortingType,
sortingMethod, null, null); sortingMethod, null, null, null);
//this constructor should only be used putting things directly into a map or getting if present since casedb, cr, and search context will be null
} }
@Override @Override
@ -141,6 +147,23 @@ public class DiscoveryKeyUtils {
return hash; return hash;
} }
/**
* Get the SearchContext for the search this key is being used in.
*
* @return The SearchContext the search key is being used in.
*
* @throws DiscoveryException Thrown when the key being used has a null
* context indicating it was not created with
* knowledge of the case or central
* repository databases.
*/
SearchContext getContext() throws DiscoveryException {
if (context == null) {
throw new DiscoveryException("The key in use was created without a context and does not support retrieving information from the databases.");
}
return context;
}
/** /**
* Get the String representation of this key. * Get the String representation of this key.
* *

View File

@ -78,24 +78,31 @@ public class DomainSearch {
* @param caseDb The case database. * @param caseDb The case database.
* @param centralRepoDb The central repository database. Can be null * @param centralRepoDb The central repository database. Can be null
* if not needed. * if not needed.
* @param context The SearchContext the search is being performed from.
* *
* @return A LinkedHashMap grouped and sorted according to the parameters. * @return A LinkedHashMap grouped and sorted according to the parameters.
* *
* @throws DiscoveryException * @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/ */
public Map<GroupKey, Integer> getGroupSizes(String userName, public Map<GroupKey, Integer> getGroupSizes(String userName,
List<AbstractFilter> filters, List<AbstractFilter> filters,
DiscoveryAttributes.AttributeType groupAttributeType, DiscoveryAttributes.AttributeType groupAttributeType,
Group.GroupSortingAlgorithm groupSortingType, Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod domainSortingMethod, ResultsSorter.SortingMethod domainSortingMethod,
SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
final Map<GroupKey, List<Result>> searchResults = searchCache.get( final Map<GroupKey, List<Result>> searchResults = searchCache.get(
userName, filters, groupAttributeType, groupSortingType, userName, filters, groupAttributeType, groupSortingType,
domainSortingMethod, caseDb, centralRepoDb); domainSortingMethod, caseDb, centralRepoDb, context);
// Transform the cached results into a map of group key to group size. // Transform the cached results into a map of group key to group size.
final LinkedHashMap<GroupKey, Integer> groupSizes = new LinkedHashMap<>(); final LinkedHashMap<GroupKey, Integer> groupSizes = new LinkedHashMap<>();
for (GroupKey groupKey : searchResults.keySet()) { for (GroupKey groupKey : searchResults.keySet()) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled before group sizes were finished being calculated");
}
groupSizes.put(groupKey, searchResults.get(groupKey).size()); groupSizes.put(groupKey, searchResults.get(groupKey).size());
} }
@ -130,11 +137,11 @@ public class DomainSearch {
Group.GroupSortingAlgorithm groupSortingType, Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod domainSortingMethod, ResultsSorter.SortingMethod domainSortingMethod,
GroupKey groupKey, int startingEntry, int numberOfEntries, GroupKey groupKey, int startingEntry, int numberOfEntries,
SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
final Map<GroupKey, List<Result>> searchResults = searchCache.get( final Map<GroupKey, List<Result>> searchResults = searchCache.get(
userName, filters, groupAttributeType, groupSortingType, userName, filters, groupAttributeType, groupSortingType,
domainSortingMethod, caseDb, centralRepoDb); domainSortingMethod, caseDb, centralRepoDb, context);
final List<Result> domainsInGroup = searchResults.get(groupKey); final List<Result> domainsInGroup = searchResults.get(groupKey);
final List<Result> page = new ArrayList<>(); final List<Result> page = new ArrayList<>();
for (int i = startingEntry; (i < startingEntry + numberOfEntries) for (int i = startingEntry; (i < startingEntry + numberOfEntries)

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2020 Basis Technology Corp. * Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -55,20 +55,24 @@ class DomainSearchCache {
* @param caseDb The case database. * @param caseDb The case database.
* @param centralRepoDb The central repository database. Can be null if * @param centralRepoDb The central repository database. Can be null if
* not needed. * not needed.
* @param context The SearchContext the search is being performed
* from.
* *
* @return Domain search results matching the given parameters. * @return Domain search results matching the given parameters.
* *
* @throws DiscoveryException * @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/ */
Map<GroupKey, List<Result>> get(String userName, Map<GroupKey, List<Result>> get(String userName,
List<AbstractFilter> filters, List<AbstractFilter> filters,
DiscoveryAttributes.AttributeType groupAttributeType, DiscoveryAttributes.AttributeType groupAttributeType,
Group.GroupSortingAlgorithm groupSortingType, Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod domainSortingMethod, ResultsSorter.SortingMethod domainSortingMethod,
SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
try { try {
final SearchKey searchKey = new SearchKey(userName, filters, groupAttributeType, final SearchKey searchKey = new SearchKey(userName, filters, groupAttributeType,
groupSortingType, domainSortingMethod, caseDb, centralRepoDb); groupSortingType, domainSortingMethod, caseDb, centralRepoDb, context);
return cache.get(searchKey); return cache.get(searchKey);
} catch (ExecutionException ex) { } catch (ExecutionException ex) {
throw new DiscoveryException("Error fetching results from cache", ex.getCause()); throw new DiscoveryException("Error fetching results from cache", ex.getCause());

View File

@ -73,7 +73,7 @@ class DomainSearchCacheLoader extends CacheLoader<SearchKey, Map<GroupKey, List<
throw new InterruptedException(); throw new InterruptedException();
} }
attr.addAttributeToResults(domainResults, attr.addAttributeToResults(domainResults,
key.getSleuthkitCase(), key.getCentralRepository()); key.getSleuthkitCase(), key.getCentralRepository(), key.getContext());
} }
// Apply secondary in memory filters // Apply secondary in memory filters
for (AbstractFilter filter : key.getFilters()) { for (AbstractFilter filter : key.getFilters()) {
@ -81,7 +81,7 @@ class DomainSearchCacheLoader extends CacheLoader<SearchKey, Map<GroupKey, List<
throw new InterruptedException(); throw new InterruptedException();
} }
if (filter.useAlternateFilter()) { if (filter.useAlternateFilter()) {
domainResults = filter.applyAlternateFilter(domainResults, key.getSleuthkitCase(), key.getCentralRepository()); domainResults = filter.applyAlternateFilter(domainResults, key.getSleuthkitCase(), key.getCentralRepository(), key.getContext());
} }
} }
// Sort the ResultDomains by the requested criteria. // Sort the ResultDomains by the requested criteria.

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2019-2020 Basis Technology Corp. * Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -62,17 +62,21 @@ public class FileSearch {
* @param caseDb The case database * @param caseDb The case database
* @param centralRepoDb The central repository database. Can be null if * @param centralRepoDb The central repository database. Can be null if
* not needed. * not needed.
* @param context The SearchContext the search is being performed
* from.
* *
* @return The raw search results * @return The raw search results
* *
* @throws DiscoveryException * @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/ */
static SearchResults runFileSearchDebug(String userName, static SearchResults runFileSearchDebug(String userName,
List<AbstractFilter> filters, List<AbstractFilter> filters,
AttributeType groupAttributeType, AttributeType groupAttributeType,
Group.GroupSortingAlgorithm groupSortingType, Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod fileSortingMethod, ResultsSorter.SortingMethod fileSortingMethod,
SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Make a list of attributes that we want to add values for. This ensures the // Make a list of attributes that we want to add values for. This ensures the
// ResultFile objects will have all needed fields set when it's time to group // ResultFile objects will have all needed fields set when it's time to group
// and sort them. For example, if we're grouping by central repo frequency, we need // and sort them. For example, if we're grouping by central repo frequency, we need
@ -82,10 +86,10 @@ public class FileSearch {
attributesNeededForGroupingOrSorting.addAll(fileSortingMethod.getRequiredAttributes()); attributesNeededForGroupingOrSorting.addAll(fileSortingMethod.getRequiredAttributes());
// Run the queries for each filter // Run the queries for each filter
List<Result> results = SearchFiltering.runQueries(filters, caseDb, centralRepoDb); List<Result> results = SearchFiltering.runQueries(filters, caseDb, centralRepoDb, context);
// Add the data to resultFiles for any attributes needed for sorting and grouping // Add the data to resultFiles for any attributes needed for sorting and grouping
addAttributes(attributesNeededForGroupingOrSorting, results, caseDb, centralRepoDb); addAttributes(attributesNeededForGroupingOrSorting, results, caseDb, centralRepoDb, context);
// Collect everything in the search results // Collect everything in the search results
SearchResults searchResults = new SearchResults(groupSortingType, groupAttributeType, fileSortingMethod); SearchResults searchResults = new SearchResults(groupSortingType, groupAttributeType, fileSortingMethod);
@ -114,21 +118,28 @@ public class FileSearch {
* @param caseDb The case database * @param caseDb The case database
* @param centralRepoDb The central repository database. Can be null if * @param centralRepoDb The central repository database. Can be null if
* not needed. * not needed.
* @param context The SearchContext the search is being performed
* from.
* *
* @return A LinkedHashMap grouped and sorted according to the parameters * @return A LinkedHashMap grouped and sorted according to the parameters
* *
* @throws DiscoveryException * @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/ */
public static Map<GroupKey, Integer> getGroupSizes(String userName, public static Map<GroupKey, Integer> getGroupSizes(String userName,
List<AbstractFilter> filters, List<AbstractFilter> filters,
AttributeType groupAttributeType, AttributeType groupAttributeType,
Group.GroupSortingAlgorithm groupSortingType, Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod fileSortingMethod, ResultsSorter.SortingMethod fileSortingMethod,
SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
Map<GroupKey, List<Result>> searchResults = runFileSearch(userName, filters, Map<GroupKey, List<Result>> searchResults = runFileSearch(userName, filters,
groupAttributeType, groupSortingType, fileSortingMethod, caseDb, centralRepoDb); groupAttributeType, groupSortingType, fileSortingMethod, caseDb, centralRepoDb, context);
LinkedHashMap<GroupKey, Integer> groupSizes = new LinkedHashMap<>(); LinkedHashMap<GroupKey, Integer> groupSizes = new LinkedHashMap<>();
for (GroupKey groupKey : searchResults.keySet()) { for (GroupKey groupKey : searchResults.keySet()) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled before group sizes were finished being calculated");
}
groupSizes.put(groupKey, searchResults.get(groupKey).size()); groupSizes.put(groupKey, searchResults.get(groupKey).size());
} }
return groupSizes; return groupSizes;
@ -151,10 +162,14 @@ public class FileSearch {
* @param caseDb The case database * @param caseDb The case database
* @param centralRepoDb The central repository database. Can be null if * @param centralRepoDb The central repository database. Can be null if
* not needed. * not needed.
* @param context The SearchContext the search is being performed
* from.
* *
* @return A LinkedHashMap grouped and sorted according to the parameters * @return A LinkedHashMap grouped and sorted according to the parameters
* *
* @throws DiscoveryException * @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/ */
public static List<Result> getFilesInGroup(String userName, public static List<Result> getFilesInGroup(String userName,
List<AbstractFilter> filters, List<AbstractFilter> filters,
@ -164,7 +179,7 @@ public class FileSearch {
GroupKey groupKey, GroupKey groupKey,
int startingEntry, int startingEntry,
int numberOfEntries, int numberOfEntries,
SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
//the group should be in the cache at this point //the group should be in the cache at this point
List<Result> filesInGroup = null; List<Result> filesInGroup = null;
SearchKey searchKey = new SearchKey(userName, filters, groupAttributeType, groupSortingType, fileSortingMethod); SearchKey searchKey = new SearchKey(userName, filters, groupAttributeType, groupSortingType, fileSortingMethod);
@ -178,7 +193,7 @@ public class FileSearch {
List<Result> page = new ArrayList<>(); List<Result> page = new ArrayList<>();
if (filesInGroup == null) { if (filesInGroup == null) {
logger.log(Level.INFO, "Group {0} was not cached, performing search to cache all groups again", groupKey); logger.log(Level.INFO, "Group {0} was not cached, performing search to cache all groups again", groupKey);
runFileSearch(userName, filters, groupAttributeType, groupSortingType, fileSortingMethod, caseDb, centralRepoDb); runFileSearch(userName, filters, groupAttributeType, groupSortingType, fileSortingMethod, caseDb, centralRepoDb, context);
synchronized (searchCache) { synchronized (searchCache) {
resultsMap = searchCache.getIfPresent(searchKey.getKeyString()); resultsMap = searchCache.getIfPresent(searchKey.getKeyString());
} }
@ -218,7 +233,6 @@ public class FileSearch {
TextSummarizer localSummarizer; TextSummarizer localSummarizer;
synchronized (searchCache) { synchronized (searchCache) {
localSummarizer = SummaryHelpers.getLocalSummarizer(); localSummarizer = SummaryHelpers.getLocalSummarizer();
} }
if (localSummarizer != null) { if (localSummarizer != null) {
try { try {
@ -247,17 +261,21 @@ public class FileSearch {
* @param caseDb The case database * @param caseDb The case database
* @param centralRepoDb The central repository database. Can be null if * @param centralRepoDb The central repository database. Can be null if
* not needed. * not needed.
* @param context The SearchContext the search is being performed
* from.
* *
* @return A LinkedHashMap grouped and sorted according to the parameters * @return A LinkedHashMap grouped and sorted according to the parameters
* *
* @throws DiscoveryException * @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/ */
private static Map<GroupKey, List<Result>> runFileSearch(String userName, public static Map<GroupKey, List<Result>> runFileSearch(String userName,
List<AbstractFilter> filters, List<AbstractFilter> filters,
AttributeType groupAttributeType, AttributeType groupAttributeType,
Group.GroupSortingAlgorithm groupSortingType, Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod fileSortingMethod, ResultsSorter.SortingMethod fileSortingMethod,
SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Make a list of attributes that we want to add values for. This ensures the // Make a list of attributes that we want to add values for. This ensures the
// ResultFile objects will have all needed fields set when it's time to group // ResultFile objects will have all needed fields set when it's time to group
@ -268,10 +286,10 @@ public class FileSearch {
attributesNeededForGroupingOrSorting.addAll(fileSortingMethod.getRequiredAttributes()); attributesNeededForGroupingOrSorting.addAll(fileSortingMethod.getRequiredAttributes());
// Run the queries for each filter // Run the queries for each filter
List<Result> results = SearchFiltering.runQueries(filters, caseDb, centralRepoDb); List<Result> results = SearchFiltering.runQueries(filters, caseDb, centralRepoDb, context);
// Add the data to resultFiles for any attributes needed for sorting and grouping // Add the data to resultFiles for any attributes needed for sorting and grouping
addAttributes(attributesNeededForGroupingOrSorting, results, caseDb, centralRepoDb); addAttributes(attributesNeededForGroupingOrSorting, results, caseDb, centralRepoDb, context);
// Collect everything in the search results // Collect everything in the search results
SearchResults searchResults = new SearchResults(groupSortingType, groupAttributeType, fileSortingMethod); SearchResults searchResults = new SearchResults(groupSortingType, groupAttributeType, fileSortingMethod);
@ -295,13 +313,17 @@ public class FileSearch {
* @param caseDb The case database * @param caseDb The case database
* @param centralRepoDb The central repository database. Can be null if not * @param centralRepoDb The central repository database. Can be null if not
* needed. * needed.
* @param context The SearchContext the search is being performed
* from.
* *
* @throws DiscoveryException * @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/ */
private static void addAttributes(List<AttributeType> attrs, List<Result> results, SleuthkitCase caseDb, CentralRepository centralRepoDb) private static void addAttributes(List<AttributeType> attrs, List<Result> results, SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context)
throws DiscoveryException { throws DiscoveryException, SearchCancellationException {
for (AttributeType attr : attrs) { for (AttributeType attr : attrs) {
attr.addAttributeToResults(results, caseDb, centralRepoDb); attr.addAttributeToResults(results, caseDb, centralRepoDb, context);
} }
} }

View File

@ -0,0 +1,40 @@
/*
* Autopsy
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.discovery.search;
import java.util.concurrent.CancellationException;
/**
* Exception to be thrown when the search has been intentionally cancelled to
* provide information on where the code was when the cancellation took place.
*/
public class SearchCancellationException extends CancellationException {

    private static final long serialVersionUID = 1L;

    /**
     * Create a SearchCancellationException carrying a description of where in
     * the search the cancellation was detected.
     *
     * @param msg Human-readable text describing what was in progress when the
     *            search was cancelled; becomes the exception message.
     */
    SearchCancellationException(String msg) {
        super(msg);
    }
}

View File

@ -0,0 +1,33 @@
/*
* Autopsy
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.discovery.search;
/**
* Interface for providing feedback on if a search has been cancelled.
*
*/
/**
 * Callback interface for checking whether an in-progress Discovery search has
 * been cancelled. Implementations are supplied by the UI layer (e.g. a
 * SwingWorker-backed context) and polled by long-running search code so it can
 * stop early.
 */
@FunctionalInterface
public interface SearchContext {

    /**
     * Returns true if the search has been cancelled, false otherwise.
     *
     * @return True if the search has been cancelled, false otherwise.
     */
    boolean searchIsCancelled();
}

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2019-2020 Basis Technology Corp. * Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -60,10 +60,16 @@ public class SearchFiltering {
* @param caseDb The case database. * @param caseDb The case database.
* @param centralRepoDb The central repo. Can be null as long as no filters * @param centralRepoDb The central repo. Can be null as long as no filters
* need it. * need it.
* @param context The SearchContext the search is being performed
* from.
* *
* @return List of Results from the search performed. * @return List of Results from the search performed.
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/ */
static List<Result> runQueries(List<AbstractFilter> filters, SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException { static List<Result> runQueries(List<AbstractFilter> filters, SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
if (caseDb == null) { if (caseDb == null) {
throw new DiscoveryException("Case DB parameter is null"); // NON-NLS throw new DiscoveryException("Case DB parameter is null"); // NON-NLS
} }
@ -82,8 +88,11 @@ public class SearchFiltering {
// The file search filter is required, so this should never be empty. // The file search filter is required, so this should never be empty.
throw new DiscoveryException("Selected filters do not include a case database query"); throw new DiscoveryException("Selected filters do not include a case database query");
} }
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled before result list could be retrieved.");
}
try { try {
return getResultList(filters, combinedQuery, caseDb, centralRepoDb); return getResultList(filters, combinedQuery, caseDb, centralRepoDb, context);
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
throw new DiscoveryException("Error querying case database", ex); // NON-NLS throw new DiscoveryException("Error querying case database", ex); // NON-NLS
} }
@ -97,17 +106,23 @@ public class SearchFiltering {
* @param caseDb The case database. * @param caseDb The case database.
* @param centralRepoDb The central repo. Can be null as long as no filters * @param centralRepoDb The central repo. Can be null as long as no filters
* need it. * need it.
* @param context The SearchContext the search is being performed
* from.
* *
* @return An ArrayList of Results returned by the query. * @return An ArrayList of Results returned by the query.
* *
* @throws TskCoreException * @throws TskCoreException
* @throws DiscoveryException * @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/ */
private static List<Result> getResultList(List<AbstractFilter> filters, String combinedQuery, SleuthkitCase caseDb, CentralRepository centralRepoDb) throws TskCoreException, DiscoveryException { private static List<Result> getResultList(List<AbstractFilter> filters, String combinedQuery, SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws TskCoreException, DiscoveryException, SearchCancellationException {
// Get all matching abstract files // Get all matching abstract files
List<Result> resultList = new ArrayList<>(); List<Result> resultList = new ArrayList<>();
List<AbstractFile> sqlResults = caseDb.findAllFilesWhere(combinedQuery); List<AbstractFile> sqlResults = caseDb.findAllFilesWhere(combinedQuery);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while the case database query was being performed.");
}
// If there are no results, return now // If there are no results, return now
if (sqlResults.isEmpty()) { if (sqlResults.isEmpty()) {
return resultList; return resultList;
@ -120,8 +135,11 @@ public class SearchFiltering {
// Now run any non-SQL filters. // Now run any non-SQL filters.
for (AbstractFilter filter : filters) { for (AbstractFilter filter : filters) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while alternate filters were being applied.");
}
if (filter.useAlternateFilter()) { if (filter.useAlternateFilter()) {
resultList = filter.applyAlternateFilter(resultList, caseDb, centralRepoDb); resultList = filter.applyAlternateFilter(resultList, caseDb, centralRepoDb, context);
} }
// There are no matches for the filters run so far, so return // There are no matches for the filters run so far, so return
if (resultList.isEmpty()) { if (resultList.isEmpty()) {
@ -243,7 +261,8 @@ public class SearchFiltering {
} }
/** /**
* Used by backend domain search code to query for additional artifact types. * Used by backend domain search code to query for additional artifact
* types.
*/ */
String getWhereClause(List<ARTIFACT_TYPE> nonVisibleArtifactTypesToInclude) { String getWhereClause(List<ARTIFACT_TYPE> nonVisibleArtifactTypesToInclude) {
StringJoiner joiner = joinStandardArtifactTypes(); StringJoiner joiner = joinStandardArtifactTypes();
@ -674,14 +693,17 @@ public class SearchFiltering {
@Override @Override
public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb, public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException { CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Set the frequency for each file // Set the frequency for each file
DiscoveryAttributes.FrequencyAttribute freqAttr = new DiscoveryAttributes.FrequencyAttribute(); DiscoveryAttributes.FrequencyAttribute freqAttr = new DiscoveryAttributes.FrequencyAttribute();
freqAttr.addAttributeToResults(currentResults, caseDb, centralRepoDb); freqAttr.addAttributeToResults(currentResults, caseDb, centralRepoDb, context);
// If the frequency matches the filter, add the file to the results // If the frequency matches the filter, add the file to the results
List<Result> frequencyResults = new ArrayList<>(); List<Result> frequencyResults = new ArrayList<>();
for (Result file : currentResults) { for (Result file : currentResults) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Frequency alternate filter was being applied.");
}
if (frequencies.contains(file.getFrequency())) { if (frequencies.contains(file.getFrequency())) {
frequencyResults.add(file); frequencyResults.add(file);
} }
@ -723,9 +745,12 @@ public class SearchFiltering {
@Override @Override
public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb, public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException { CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
List<Result> filteredResults = new ArrayList<>(); List<Result> filteredResults = new ArrayList<>();
for (Result result : currentResults) { for (Result result : currentResults) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Known Account Type alternate filter was being applied.");
}
if (result instanceof ResultDomain) { if (result instanceof ResultDomain) {
ResultDomain domain = (ResultDomain) result; ResultDomain domain = (ResultDomain) result;
if (domain.hasKnownAccountType()) { if (domain.hasKnownAccountType()) {
@ -765,11 +790,14 @@ public class SearchFiltering {
@Override @Override
public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb, public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException { CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
DiscoveryAttributes.PreviouslyNotableAttribute previouslyNotableAttr = new DiscoveryAttributes.PreviouslyNotableAttribute(); DiscoveryAttributes.PreviouslyNotableAttribute previouslyNotableAttr = new DiscoveryAttributes.PreviouslyNotableAttribute();
previouslyNotableAttr.addAttributeToResults(currentResults, caseDb, centralRepoDb); previouslyNotableAttr.addAttributeToResults(currentResults, caseDb, centralRepoDb, context);
List<Result> filteredResults = new ArrayList<>(); List<Result> filteredResults = new ArrayList<>();
for (Result file : currentResults) { for (Result file : currentResults) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Previously Notable alternate filter was being applied.");
}
if (file.getPreviouslyNotableInCR() == SearchData.PreviouslyNotable.PREVIOUSLY_NOTABLE) { if (file.getPreviouslyNotableInCR() == SearchData.PreviouslyNotable.PREVIOUSLY_NOTABLE) {
filteredResults.add(file); filteredResults.add(file);
} }
@ -1068,7 +1096,7 @@ public class SearchFiltering {
@Override @Override
public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb, public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException { CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
if (centralRepoDb == null) { if (centralRepoDb == null) {
throw new DiscoveryException("Can not run Previously Notable filter with null Central Repository DB"); // NON-NLS throw new DiscoveryException("Can not run Previously Notable filter with null Central Repository DB"); // NON-NLS
@ -1087,6 +1115,9 @@ public class SearchFiltering {
CorrelationAttributeInstance.Type type = CorrelationAttributeInstance.getDefaultCorrelationTypes().get(CorrelationAttributeInstance.FILES_TYPE_ID); CorrelationAttributeInstance.Type type = CorrelationAttributeInstance.getDefaultCorrelationTypes().get(CorrelationAttributeInstance.FILES_TYPE_ID);
for (Result result : currentResults) { for (Result result : currentResults) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Notable alternate filter was being applied.");
}
ResultFile file = (ResultFile) result; ResultFile file = (ResultFile) result;
if (result.getType() == SearchData.Type.DOMAIN) { if (result.getType() == SearchData.Type.DOMAIN) {
break; break;

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy * Autopsy
* *
* Copyright 2020 Basis Technology Corp. * Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -31,7 +31,6 @@ import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import java.util.logging.Level; import java.util.logging.Level;
import javax.swing.SwingUtilities;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.openide.util.NbBundle.Messages; import org.openide.util.NbBundle.Messages;
import org.openide.windows.WindowManager; import org.openide.windows.WindowManager;
@ -574,7 +573,7 @@ final class DiscoveryDialog extends javax.swing.JDialog {
} }
private void searchButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_searchButtonActionPerformed private void searchButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_searchButtonActionPerformed
// Get the selected filters setVisible(false); //set visible used here instead of dispose in case dispose code changes
final DiscoveryTopComponent tc = DiscoveryTopComponent.getTopComponent(); final DiscoveryTopComponent tc = DiscoveryTopComponent.getTopComponent();
if (tc == null) { if (tc == null) {
setValid("No Top Component Found"); setValid("No Top Component Found");
@ -584,6 +583,7 @@ final class DiscoveryDialog extends javax.swing.JDialog {
tc.open(); tc.open();
} }
tc.resetTopComponent(); tc.resetTopComponent();
// Get the selected filters
List<AbstractFilter> filters; List<AbstractFilter> filters;
if (videosButton.isSelected()) { if (videosButton.isSelected()) {
filters = videoFilterPanel.getFilters(); filters = videoFilterPanel.getFilters();
@ -617,7 +617,6 @@ final class DiscoveryDialog extends javax.swing.JDialog {
} }
searchWorker = new SearchWorker(centralRepoDb, type, filters, groupingAttr, groupSortAlgorithm, fileSort); searchWorker = new SearchWorker(centralRepoDb, type, filters, groupingAttr, groupSortAlgorithm, fileSort);
searchWorker.execute(); searchWorker.execute();
dispose();
tc.toFront(); tc.toFront();
tc.requestActive(); tc.requestActive();
}//GEN-LAST:event_searchButtonActionPerformed }//GEN-LAST:event_searchButtonActionPerformed
@ -651,6 +650,7 @@ final class DiscoveryDialog extends javax.swing.JDialog {
void cancelSearch() { void cancelSearch() {
if (searchWorker != null) { if (searchWorker != null) {
searchWorker.cancel(true); searchWorker.cancel(true);
searchWorker = null;
} }
} }
@ -750,7 +750,6 @@ final class DiscoveryDialog extends javax.swing.JDialog {
|| eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT.getTypeID()) { || eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT.getTypeID()) {
shouldUpdate = shouldUpdateFilters(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(), eventData, interestingItems); shouldUpdate = shouldUpdateFilters(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(), eventData, interestingItems);
} }
} }
} catch (NoCurrentCaseException notUsed) { } catch (NoCurrentCaseException notUsed) {
// Case is closed, do nothing. // Case is closed, do nothing.

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy * Autopsy
* *
* Copyright 2019-2020 Basis Technology Corp. * Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -260,7 +260,6 @@ public final class DiscoveryTopComponent extends TopComponent {
private void newSearchButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_newSearchButtonActionPerformed private void newSearchButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_newSearchButtonActionPerformed
close(); close();
final DiscoveryDialog discDialog = DiscoveryDialog.getDiscoveryDialogInstance(); final DiscoveryDialog discDialog = DiscoveryDialog.getDiscoveryDialogInstance();
discDialog.cancelSearch();
discDialog.setVisible(true); discDialog.setVisible(true);
discDialog.validateDialog(); discDialog.validateDialog();
}//GEN-LAST:event_newSearchButtonActionPerformed }//GEN-LAST:event_newSearchButtonActionPerformed

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy * Autopsy
* *
* Copyright 2019 Basis Technology Corp. * Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -36,6 +36,8 @@ import org.sleuthkit.autopsy.discovery.search.DiscoveryException;
import org.sleuthkit.autopsy.discovery.search.DomainSearch; import org.sleuthkit.autopsy.discovery.search.DomainSearch;
import org.sleuthkit.autopsy.discovery.search.ResultsSorter; import org.sleuthkit.autopsy.discovery.search.ResultsSorter;
import org.sleuthkit.autopsy.discovery.search.Result; import org.sleuthkit.autopsy.discovery.search.Result;
import org.sleuthkit.autopsy.discovery.search.SearchCancellationException;
import org.sleuthkit.autopsy.discovery.search.SearchContext;
/** /**
* SwingWorker to retrieve the contents of a page. * SwingWorker to retrieve the contents of a page.
@ -87,7 +89,7 @@ final class PageWorker extends SwingWorker<Void, Void> {
@Override @Override
protected Void doInBackground() throws Exception { protected Void doInBackground() throws Exception {
SearchContext context = new SwingWorkerSearchContext(this);
try { try {
// Run the search // Run the search
if (resultType == SearchData.Type.DOMAIN) { if (resultType == SearchData.Type.DOMAIN) {
@ -96,17 +98,22 @@ final class PageWorker extends SwingWorker<Void, Void> {
groupingAttribute, groupingAttribute,
groupSort, groupSort,
fileSortMethod, groupKey, startingEntry, pageSize, fileSortMethod, groupKey, startingEntry, pageSize,
Case.getCurrentCase().getSleuthkitCase(), centralRepo)); Case.getCurrentCase().getSleuthkitCase(), centralRepo, context));
} else { } else {
results.addAll(FileSearch.getFilesInGroup(System.getProperty(USER_NAME_PROPERTY), searchfilters, results.addAll(FileSearch.getFilesInGroup(System.getProperty(USER_NAME_PROPERTY), searchfilters,
groupingAttribute, groupingAttribute,
groupSort, groupSort,
fileSortMethod, groupKey, startingEntry, pageSize, fileSortMethod, groupKey, startingEntry, pageSize,
Case.getCurrentCase().getSleuthkitCase(), centralRepo)); Case.getCurrentCase().getSleuthkitCase(), centralRepo, context));
} }
} catch (DiscoveryException ex) { } catch (DiscoveryException ex) {
logger.log(Level.SEVERE, "Error running file search test", ex); logger.log(Level.SEVERE, "Error running file search test", ex);
cancel(true); cancel(true);
} catch (SearchCancellationException ex) {
//The user does not explicitly have a way to cancel the loading of a page
//but they could have cancelled the search during the loading of the first page
//So this may or may not be an issue depending on when this occurred.
logger.log(Level.WARNING, "Search was cancelled while retrieving data for results page with starting entry: " + startingEntry, ex);
} }
return null; return null;
} }

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy * Autopsy
* *
* Copyright 2019 Basis Technology Corp. * Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -35,6 +35,8 @@ import org.sleuthkit.autopsy.discovery.search.FileSearch;
import org.sleuthkit.autopsy.discovery.search.DiscoveryException; import org.sleuthkit.autopsy.discovery.search.DiscoveryException;
import org.sleuthkit.autopsy.discovery.search.DomainSearch; import org.sleuthkit.autopsy.discovery.search.DomainSearch;
import org.sleuthkit.autopsy.discovery.search.ResultsSorter; import org.sleuthkit.autopsy.discovery.search.ResultsSorter;
import org.sleuthkit.autopsy.discovery.search.SearchCancellationException;
import org.sleuthkit.autopsy.discovery.search.SearchContext;
import org.sleuthkit.autopsy.discovery.search.SearchData; import org.sleuthkit.autopsy.discovery.search.SearchData;
/** /**
@ -75,23 +77,28 @@ final class SearchWorker extends SwingWorker<Void, Void> {
protected Void doInBackground() throws Exception { protected Void doInBackground() throws Exception {
try { try {
// Run the search // Run the search
SearchContext context = new SwingWorkerSearchContext(this);
if (searchType == SearchData.Type.DOMAIN) { if (searchType == SearchData.Type.DOMAIN) {
DomainSearch domainSearch = new DomainSearch(); DomainSearch domainSearch = new DomainSearch();
results.putAll(domainSearch.getGroupSizes(System.getProperty(USER_NAME_PROPERTY), filters, results.putAll(domainSearch.getGroupSizes(System.getProperty(USER_NAME_PROPERTY), filters,
groupingAttr, groupingAttr,
groupSortAlgorithm, groupSortAlgorithm,
fileSort, fileSort,
Case.getCurrentCase().getSleuthkitCase(), centralRepoDb)); Case.getCurrentCase().getSleuthkitCase(), centralRepoDb, context));
} else { } else {
results.putAll(FileSearch.getGroupSizes(System.getProperty(USER_NAME_PROPERTY), filters, results.putAll(FileSearch.getGroupSizes(System.getProperty(USER_NAME_PROPERTY), filters,
groupingAttr, groupingAttr,
groupSortAlgorithm, groupSortAlgorithm,
fileSort, fileSort,
Case.getCurrentCase().getSleuthkitCase(), centralRepoDb)); Case.getCurrentCase().getSleuthkitCase(), centralRepoDb, context));
} }
} catch (DiscoveryException ex) { } catch (DiscoveryException ex) {
logger.log(Level.SEVERE, "Error running file search test", ex); logger.log(Level.SEVERE, "Error running file search test.", ex);
cancel(true); cancel(true);
} catch (SearchCancellationException ex) {
//search cancellation exceptions should indicate that the user chose to cancel this search //search cancellation exceptions should indicate that the user chose to cancel this search
//so would not be a problem but we might be curious what was being done when it was cancelled
logger.log(Level.INFO, "Discovery search was cancelled.", ex);
} }
return null; return null;
} }

View File

@ -0,0 +1,45 @@
/*
* Autopsy
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.discovery.ui;
import javax.swing.SwingWorker;
import org.sleuthkit.autopsy.discovery.search.SearchContext;
/**
* Implementation of SearchContext for searches being performed in the
* background thread of a SwingWorker.
*/
class SwingWorkerSearchContext implements SearchContext {

    // The background worker whose cancellation flag backs this context.
    private final SwingWorker<Void, Void> worker;

    /**
     * Create a context that reports cancellation based on the state of the
     * given SwingWorker.
     *
     * @param worker The SwingWorker the search is being performed in.
     */
    SwingWorkerSearchContext(SwingWorker<Void, Void> worker) {
        this.worker = worker;
    }

    @Override
    public boolean searchIsCancelled() {
        // Delegate directly to the worker's cancellation state.
        return worker.isCancelled();
    }
}

View File

@ -48,6 +48,15 @@ class DataSourceFilter extends AbstractFileSearchFilter<DataSourcePanel> {
return this.getComponent().isSelected(); return this.getComponent().isSelected();
} }
/**
* Set the data source filter to select the specified data source initially.
*
* @param dataSourceId - The data source to select.
*/
void setSelectedDataSource(long dataSourceId) {
this.getComponent().setDataSourceSelected(dataSourceId);
}
/** /**
* Reset the data source filter to be up to date with the current case. * Reset the data source filter to be up to date with the current case.
*/ */

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2018 Basis Technology Corp. * Copyright 2018-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -152,6 +152,19 @@ public class DataSourcePanel extends javax.swing.JPanel {
this.dataSourceNoteLabel.setEnabled(enabled); this.dataSourceNoteLabel.setEnabled(enabled);
} }
/**
* Set the data source initially selected in this filter.
*
* @param dataSourceId - The object ID of the data source which will be
* selected.
*/
void setDataSourceSelected(long dataSourceId) {
this.dataSourceCheckBox.setSelected(true);
setComponentsEnabled();
String dataSourceName = dataSourceMap.get(dataSourceId);
dataSourceList.setSelectedValue(dataSourceName, true);
}
/** /**
* This method is called from within the constructor to initialize the form. * This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always * WARNING: Do NOT modify this code. The content of this method is always

View File

@ -32,6 +32,7 @@ final class FileSearchAction extends CallableSystemAction implements FileSearchP
private static final long serialVersionUID = 1L; private static final long serialVersionUID = 1L;
private static FileSearchAction instance = null; private static FileSearchAction instance = null;
private static FileSearchDialog searchDialog; private static FileSearchDialog searchDialog;
private static Long selectedDataSourceId;
FileSearchAction() { FileSearchAction() {
super(); super();
@ -39,7 +40,7 @@ final class FileSearchAction extends CallableSystemAction implements FileSearchP
Case.addEventTypeSubscriber(EnumSet.of(Case.Events.CURRENT_CASE), (PropertyChangeEvent evt) -> { Case.addEventTypeSubscriber(EnumSet.of(Case.Events.CURRENT_CASE), (PropertyChangeEvent evt) -> {
if (evt.getPropertyName().equals(Case.Events.CURRENT_CASE.toString())) { if (evt.getPropertyName().equals(Case.Events.CURRENT_CASE.toString())) {
setEnabled(evt.getNewValue() != null); setEnabled(evt.getNewValue() != null);
if (searchDialog != null && evt.getNewValue() != null){ if (searchDialog != null && evt.getNewValue() != null) {
searchDialog.resetCaseDependentFilters(); searchDialog.resetCaseDependentFilters();
} }
} }
@ -58,6 +59,8 @@ final class FileSearchAction extends CallableSystemAction implements FileSearchP
if (searchDialog == null) { if (searchDialog == null) {
searchDialog = new FileSearchDialog(); searchDialog = new FileSearchDialog();
} }
//Preserve whatever the previously selected data source was
selectedDataSourceId = null;
searchDialog.setVisible(true); searchDialog.setVisible(true);
} }
@ -66,6 +69,8 @@ final class FileSearchAction extends CallableSystemAction implements FileSearchP
if (searchDialog == null) { if (searchDialog == null) {
searchDialog = new FileSearchDialog(); searchDialog = new FileSearchDialog();
} }
//Apply the previously selected data source to the search filter
searchDialog.setSelectedDataSourceFilter(selectedDataSourceId);
searchDialog.setVisible(true); searchDialog.setVisible(true);
} }
@ -85,7 +90,15 @@ final class FileSearchAction extends CallableSystemAction implements FileSearchP
} }
@Override @Override
public void showDialog() { public void showDialog(Long dataSourceId) {
selectedDataSourceId = dataSourceId;
performAction(); performAction();
}
@Override
@Deprecated
public void showDialog() {
showDialog(null);
} }
} }

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2011-2018 Basis Technology Corp. * Copyright 2011-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -29,6 +29,8 @@ import org.openide.windows.WindowManager;
@SuppressWarnings("PMD.SingularField") // UI widgets cause lots of false positives @SuppressWarnings("PMD.SingularField") // UI widgets cause lots of false positives
final class FileSearchDialog extends javax.swing.JDialog { final class FileSearchDialog extends javax.swing.JDialog {
private static final long serialVersionUID = 1L;
/** /**
* Creates new form FileSearchDialog * Creates new form FileSearchDialog
*/ */
@ -48,6 +50,15 @@ final class FileSearchDialog extends javax.swing.JDialog {
}); });
} }
/**
* Set the data source filter to select the specified data source initially.
*
* @param dataSourceId - The data source to select.
*/
void setSelectedDataSourceFilter(long dataSourceId) {
fileSearchPanel1.setDataSourceFilter(dataSourceId);
}
/** /**
* Reset the filters which are populated with options based on the contents * Reset the filters which are populated with options based on the contents
* of the current case. * of the current case.

View File

@ -62,8 +62,8 @@ class FileSearchPanel extends javax.swing.JPanel {
private static final long serialVersionUID = 1L; private static final long serialVersionUID = 1L;
private final List<FileSearchFilter> filters = new ArrayList<>(); private final List<FileSearchFilter> filters = new ArrayList<>();
private static int resultWindowCount = 0; //keep track of result windows so they get unique names private static int resultWindowCount = 0; //keep track of result windows so they get unique names
private static MimeTypeFilter mimeTypeFilter = new MimeTypeFilter(); private static final MimeTypeFilter mimeTypeFilter = new MimeTypeFilter();
private static DataSourceFilter dataSourceFilter = new DataSourceFilter(); private static final DataSourceFilter dataSourceFilter = new DataSourceFilter();
private static final String EMPTY_WHERE_CLAUSE = NbBundle.getMessage(DateSearchFilter.class, "FileSearchPanel.emptyWhereClause.text"); private static final String EMPTY_WHERE_CLAUSE = NbBundle.getMessage(DateSearchFilter.class, "FileSearchPanel.emptyWhereClause.text");
private static SwingWorker<TableFilterNode, Void> searchWorker = null; private static SwingWorker<TableFilterNode, Void> searchWorker = null;
@ -106,7 +106,6 @@ class FileSearchPanel extends javax.swing.JPanel {
DateSearchFilter dateFilter = new DateSearchFilter(); DateSearchFilter dateFilter = new DateSearchFilter();
KnownStatusSearchFilter knowStatusFilter = new KnownStatusSearchFilter(); KnownStatusSearchFilter knowStatusFilter = new KnownStatusSearchFilter();
HashSearchFilter hashFilter = new HashSearchFilter(); HashSearchFilter hashFilter = new HashSearchFilter();
panel2.add(new FilterArea(NbBundle.getMessage(this.getClass(), "FileSearchPanel.filterTitle.name"), nameFilter)); panel2.add(new FilterArea(NbBundle.getMessage(this.getClass(), "FileSearchPanel.filterTitle.name"), nameFilter));
panel3.add(new FilterArea(NbBundle.getMessage(this.getClass(), "FileSearchPanel.filterTitle.metadata"), sizeFilter)); panel3.add(new FilterArea(NbBundle.getMessage(this.getClass(), "FileSearchPanel.filterTitle.metadata"), sizeFilter));
@ -149,6 +148,15 @@ class FileSearchPanel extends javax.swing.JPanel {
searchButton.setEnabled(isValidSearch()); searchButton.setEnabled(isValidSearch());
} }
/**
* Set the data source filter to select the specified data source initially.
*
* @param dataSourceId - The data source to select.
*/
void setDataSourceFilter(long dataSourceId) {
dataSourceFilter.setSelectedDataSource(dataSourceId);
}
/** /**
* @return true if any of the filters in the panel are enabled (checked) * @return true if any of the filters in the panel are enabled (checked)
*/ */

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2011-2018 Basis Technology Corp. * Copyright 2011-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");

View File

@ -10,7 +10,6 @@ OpenIDE-Module-Short-Description=Embedded File Extraction Ingest Module
EmbeddedFileExtractorIngestModule.SevenZipContentReadStream.seek.exception.invalidOrigin=Invalid seek origin: {0} EmbeddedFileExtractorIngestModule.SevenZipContentReadStream.seek.exception.invalidOrigin=Invalid seek origin: {0}
EmbeddedFileExtractorIngestModule.SevenZipContentReadStream.read.exception.errReadStream=Error reading content stream. EmbeddedFileExtractorIngestModule.SevenZipContentReadStream.read.exception.errReadStream=Error reading content stream.
EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFileLevel=Content-only Encryption (Archive File) EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFileLevel=Content-only Encryption (Archive File)
EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFull=Full Encryption (Archive File)
EmbeddedFileExtractorIngestModule.ArchiveExtractor.init.errInitModule.details=Error initializing output dir: {0}: {1} EmbeddedFileExtractorIngestModule.ArchiveExtractor.init.errInitModule.details=Error initializing output dir: {0}: {1}
EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnMsg=Possible ZIP bomb detected in archive: {0}, item: {1} EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnMsg=Possible ZIP bomb detected in archive: {0}, item: {1}
EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnDetails=Compression ratio is {0}, skipping items in {1}. EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnDetails=Compression ratio is {0}, skipping items in {1}.

View File

@ -23,7 +23,6 @@ OpenIDE-Module-Short-Description=Embedded File Extraction Ingest Module
EmbeddedFileExtractorIngestModule.SevenZipContentReadStream.seek.exception.invalidOrigin=Invalid seek origin: {0} EmbeddedFileExtractorIngestModule.SevenZipContentReadStream.seek.exception.invalidOrigin=Invalid seek origin: {0}
EmbeddedFileExtractorIngestModule.SevenZipContentReadStream.read.exception.errReadStream=Error reading content stream. EmbeddedFileExtractorIngestModule.SevenZipContentReadStream.read.exception.errReadStream=Error reading content stream.
EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFileLevel=Content-only Encryption (Archive File) EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFileLevel=Content-only Encryption (Archive File)
EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFull=Full Encryption (Archive File)
EmbeddedFileExtractorIngestModule.ArchiveExtractor.init.errInitModule.details=Error initializing output dir: {0}: {1} EmbeddedFileExtractorIngestModule.ArchiveExtractor.init.errInitModule.details=Error initializing output dir: {0}: {1}
EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnMsg=Possible ZIP bomb detected in archive: {0}, item: {1} EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnMsg=Possible ZIP bomb detected in archive: {0}, item: {1}
EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnDetails=Compression ratio is {0}, skipping items in {1}. EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnDetails=Compression ratio is {0}, skipping items in {1}.

View File

@ -58,6 +58,7 @@ import org.sleuthkit.autopsy.casemodule.services.FileManager;
import org.sleuthkit.autopsy.coreutils.FileUtil; import org.sleuthkit.autopsy.coreutils.FileUtil;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.modules.encryptiondetection.EncryptionDetectionModuleFactory;
import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage; import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestMonitor; import org.sleuthkit.autopsy.ingest.IngestMonitor;
@ -95,8 +96,7 @@ class SevenZipExtractor {
//encryption type strings //encryption type strings
private static final String ENCRYPTION_FILE_LEVEL = NbBundle.getMessage(EmbeddedFileExtractorIngestModule.class, private static final String ENCRYPTION_FILE_LEVEL = NbBundle.getMessage(EmbeddedFileExtractorIngestModule.class,
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFileLevel"); "EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFileLevel");
private static final String ENCRYPTION_FULL = NbBundle.getMessage(EmbeddedFileExtractorIngestModule.class, private static final String ENCRYPTION_FULL = EncryptionDetectionModuleFactory.PASSWORD_PROTECT_MESSAGE;
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFull");
//zip bomb detection //zip bomb detection
private static final int MAX_DEPTH = 4; private static final int MAX_DEPTH = 4;

View File

@ -119,7 +119,6 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
} }
@Messages({ @Messages({
"EncryptionDetectionFileIngestModule.artifactComment.password=Password protection detected.",
"EncryptionDetectionFileIngestModule.artifactComment.suspected=Suspected encryption due to high entropy (%f)." "EncryptionDetectionFileIngestModule.artifactComment.suspected=Suspected encryption due to high entropy (%f)."
}) })
@Override @Override
@ -160,7 +159,7 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
String.format(Bundle.EncryptionDetectionFileIngestModule_artifactComment_suspected(), calculatedEntropy)); String.format(Bundle.EncryptionDetectionFileIngestModule_artifactComment_suspected(), calculatedEntropy));
} else if (isFilePasswordProtected(file)) { } else if (isFilePasswordProtected(file)) {
return flagFile(file, BlackboardArtifact.Type.TSK_ENCRYPTION_DETECTED, Score.SCORE_NOTABLE, return flagFile(file, BlackboardArtifact.Type.TSK_ENCRYPTION_DETECTED, Score.SCORE_NOTABLE,
Bundle.EncryptionDetectionFileIngestModule_artifactComment_password()); EncryptionDetectionModuleFactory.PASSWORD_PROTECT_MESSAGE);
} }
} }
} catch (ReadContentInputStreamException | SAXException | TikaException | UnsupportedCodecException ex) { } catch (ReadContentInputStreamException | SAXException | TikaException | UnsupportedCodecException ex) {

View File

@ -36,10 +36,14 @@ import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettingsPanel;
@ServiceProvider(service = IngestModuleFactory.class) @ServiceProvider(service = IngestModuleFactory.class)
@Messages({ @Messages({
"EncryptionDetectionFileIngestModule.moduleName.text=Encryption Detection", "EncryptionDetectionFileIngestModule.moduleName.text=Encryption Detection",
"EncryptionDetectionFileIngestModule.getDesc.text=Looks for files with the specified minimum entropy." "EncryptionDetectionFileIngestModule.getDesc.text=Looks for files with the specified minimum entropy.",
"EncryptionDetectionFileIngestModule.artifactComment.password=Password protection detected.",
}) })
public class EncryptionDetectionModuleFactory implements IngestModuleFactory { public class EncryptionDetectionModuleFactory implements IngestModuleFactory {
public static final String PASSWORD_PROTECT_MESSAGE = Bundle.EncryptionDetectionFileIngestModule_artifactComment_password();
@Override @Override
public String getModuleDisplayName() { public String getModuleDisplayName() {
return getModuleName(); return getModuleName();

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2020 Basis Technology Corp. * Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -52,7 +52,7 @@ public class DomainSearchCacheLoaderTest {
new DiscoveryAttributes.DataSourceAttribute(), new DiscoveryAttributes.DataSourceAttribute(),
Group.GroupSortingAlgorithm.BY_GROUP_NAME, Group.GroupSortingAlgorithm.BY_GROUP_NAME,
ResultsSorter.SortingMethod.BY_DOMAIN_NAME, ResultsSorter.SortingMethod.BY_DOMAIN_NAME,
caseDb, null); caseDb, null, new TestSearchContextImpl(false));
DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class); DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class);
when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains); when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains);
@ -88,7 +88,7 @@ public class DomainSearchCacheLoaderTest {
new DiscoveryAttributes.NoGroupingAttribute(), new DiscoveryAttributes.NoGroupingAttribute(),
Group.GroupSortingAlgorithm.BY_GROUP_NAME, Group.GroupSortingAlgorithm.BY_GROUP_NAME,
ResultsSorter.SortingMethod.BY_DOMAIN_NAME, ResultsSorter.SortingMethod.BY_DOMAIN_NAME,
caseDb, null); caseDb, null, new TestSearchContextImpl(false));
DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class); DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class);
when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains); when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains);
@ -121,7 +121,7 @@ public class DomainSearchCacheLoaderTest {
new DiscoveryAttributes.NoGroupingAttribute(), new DiscoveryAttributes.NoGroupingAttribute(),
Group.GroupSortingAlgorithm.BY_GROUP_NAME, Group.GroupSortingAlgorithm.BY_GROUP_NAME,
ResultsSorter.SortingMethod.BY_DATA_SOURCE, ResultsSorter.SortingMethod.BY_DATA_SOURCE,
caseDb, null); caseDb, null, new TestSearchContextImpl(false));
DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class); DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class);
when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains); when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains);
@ -155,7 +155,7 @@ public class DomainSearchCacheLoaderTest {
new DiscoveryAttributes.DataSourceAttribute(), new DiscoveryAttributes.DataSourceAttribute(),
Group.GroupSortingAlgorithm.BY_GROUP_SIZE, Group.GroupSortingAlgorithm.BY_GROUP_SIZE,
ResultsSorter.SortingMethod.BY_DOMAIN_NAME, ResultsSorter.SortingMethod.BY_DOMAIN_NAME,
caseDb, null); caseDb, null, new TestSearchContextImpl(false));
DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class); DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class);
when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains); when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains);
@ -173,4 +173,5 @@ public class DomainSearchCacheLoaderTest {
} }
} }
} }
} }

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2020 Basis Technology Corp. * Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -24,7 +24,6 @@ import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import org.junit.Test; import org.junit.Test;
import static org.mockito.Mockito.*; import static org.mockito.Mockito.*;
import static org.junit.Assert.*; import static org.junit.Assert.*;
import org.sleuthkit.autopsy.discovery.search.DiscoveryKeyUtils.GroupKey; import org.sleuthkit.autopsy.discovery.search.DiscoveryKeyUtils.GroupKey;
@ -46,11 +45,11 @@ public class DomainSearchTest {
); );
} }
}; };
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null); DomainSearch domainSearch = new DomainSearch(cache, null, null);
Map<GroupKey, Integer> sizes = domainSearch.getGroupSizes(null, Map<GroupKey, Integer> sizes = domainSearch.getGroupSizes(null,
new ArrayList<>(), null, null, null, null, null); new ArrayList<>(), null, null, null, null, null, new TestSearchContextImpl(false));
assertEquals(4, sizes.get(groupOne).longValue()); assertEquals(4, sizes.get(groupOne).longValue());
} }
@ -81,11 +80,11 @@ public class DomainSearchTest {
} }
}; };
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null); DomainSearch domainSearch = new DomainSearch(cache, null, null);
Map<GroupKey, Integer> sizes = domainSearch.getGroupSizes(null, Map<GroupKey, Integer> sizes = domainSearch.getGroupSizes(null,
new ArrayList<>(), null, null, null, null, null); new ArrayList<>(), null, null, null, null, null, new TestSearchContextImpl(false));
assertEquals(4, sizes.get(groupOne).longValue()); assertEquals(4, sizes.get(groupOne).longValue());
assertEquals(3, sizes.get(groupTwo).longValue()); assertEquals(3, sizes.get(groupTwo).longValue());
assertEquals(1, sizes.get(groupThree).longValue()); assertEquals(1, sizes.get(groupThree).longValue());
@ -95,11 +94,11 @@ public class DomainSearchTest {
public void groupSizes_EmptyGroup_ShouldBeSizeZero() throws DiscoveryException { public void groupSizes_EmptyGroup_ShouldBeSizeZero() throws DiscoveryException {
DomainSearchCache cache = mock(DomainSearchCache.class); DomainSearchCache cache = mock(DomainSearchCache.class);
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(new HashMap<>()); when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(new HashMap<>());
DomainSearch domainSearch = new DomainSearch(cache, null, null); DomainSearch domainSearch = new DomainSearch(cache, null, null);
Map<GroupKey, Integer> sizes = domainSearch.getGroupSizes(null, Map<GroupKey, Integer> sizes = domainSearch.getGroupSizes(null,
new ArrayList<>(), null, null, null, null, null); new ArrayList<>(), null, null, null, null, null, new TestSearchContextImpl(false));
assertEquals(0, sizes.size()); assertEquals(0, sizes.size());
} }
@ -120,11 +119,11 @@ public class DomainSearchTest {
} }
}; };
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null); DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null, List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 3, null, null); new ArrayList<>(), null, null, null, groupOne, 0, 3, null, null, new TestSearchContextImpl(false));
assertEquals(3, firstPage.size()); assertEquals(3, firstPage.size());
for (int i = 0; i < firstPage.size(); i++) { for (int i = 0; i < firstPage.size(); i++) {
assertEquals(domains.get(i), firstPage.get(i)); assertEquals(domains.get(i), firstPage.get(i));
@ -148,11 +147,11 @@ public class DomainSearchTest {
} }
}; };
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null); DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null, List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 100, null, null); new ArrayList<>(), null, null, null, groupOne, 0, 100, null, null, new TestSearchContextImpl(false));
assertEquals(4, firstPage.size()); assertEquals(4, firstPage.size());
for (int i = 0; i < firstPage.size(); i++) { for (int i = 0; i < firstPage.size(); i++) {
assertEquals(domains.get(i), firstPage.get(i)); assertEquals(domains.get(i), firstPage.get(i));
@ -176,11 +175,11 @@ public class DomainSearchTest {
} }
}; };
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null); DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null, List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 2, null, null); new ArrayList<>(), null, null, null, groupOne, 0, 2, null, null, new TestSearchContextImpl(false));
assertEquals(2, firstPage.size()); assertEquals(2, firstPage.size());
for (int i = 0; i < firstPage.size(); i++) { for (int i = 0; i < firstPage.size(); i++) {
assertEquals(domains.get(i), firstPage.get(i)); assertEquals(domains.get(i), firstPage.get(i));
@ -204,11 +203,11 @@ public class DomainSearchTest {
} }
}; };
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null); DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null, List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 3, 1, null, null); new ArrayList<>(), null, null, null, groupOne, 3, 1, null, null, new TestSearchContextImpl(false));
assertEquals(1, firstPage.size()); assertEquals(1, firstPage.size());
assertEquals(domains.get(domains.size() - 1), firstPage.get(0)); assertEquals(domains.get(domains.size() - 1), firstPage.get(0));
} }
@ -230,11 +229,11 @@ public class DomainSearchTest {
} }
}; };
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null); DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null, List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 20, 5, null, null); new ArrayList<>(), null, null, null, groupOne, 20, 5, null, null, new TestSearchContextImpl(false));
assertEquals(0, firstPage.size()); assertEquals(0, firstPage.size());
} }
@ -255,11 +254,11 @@ public class DomainSearchTest {
} }
}; };
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null); DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null, List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 0, null, null); new ArrayList<>(), null, null, null, groupOne, 0, 0, null, null, new TestSearchContextImpl(false));
assertEquals(0, firstPage.size()); assertEquals(0, firstPage.size());
} }
@ -290,11 +289,11 @@ public class DomainSearchTest {
} }
}; };
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null); DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null, List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 3, null, null); new ArrayList<>(), null, null, null, groupOne, 0, 3, null, null, new TestSearchContextImpl(false));
assertEquals(3, firstPage.size()); assertEquals(3, firstPage.size());
} }
@ -325,11 +324,11 @@ public class DomainSearchTest {
} }
}; };
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null); DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null, List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupTwo, 1, 2, null, null); new ArrayList<>(), null, null, null, groupTwo, 1, 2, null, null, new TestSearchContextImpl(false));
assertEquals(2, firstPage.size()); assertEquals(2, firstPage.size());
for (int i = 0; i < firstPage.size(); i++) { for (int i = 0; i < firstPage.size(); i++) {
assertEquals(dummyData.get(groupTwo).get(i + 1), firstPage.get(i)); assertEquals(dummyData.get(groupTwo).get(i + 1), firstPage.get(i));
@ -357,7 +356,7 @@ public class DomainSearchTest {
} }
}; };
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData); when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null); DomainSearch domainSearch = new DomainSearch(cache, null, null);
@ -365,9 +364,9 @@ public class DomainSearchTest {
int size = 2; int size = 2;
while (start + size <= domains.size()) { while (start + size <= domains.size()) {
List<Result> page = domainSearch.getDomainsInGroup(null, List<Result> page = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, start, size, null, null); new ArrayList<>(), null, null, null, groupOne, start, size, null, null, new TestSearchContextImpl(false));
assertEquals(2, page.size()); assertEquals(2, page.size());
for(int i = 0; i < page.size(); i++) { for (int i = 0; i < page.size(); i++) {
assertEquals(domains.get(start + i), page.get(i)); assertEquals(domains.get(start + i), page.get(i));
} }
@ -379,7 +378,7 @@ public class DomainSearchTest {
private final String name; private final String name;
public DummyKey(String name) { DummyKey(String name) {
this.name = name; this.name = name;
} }

View File

@ -0,0 +1,37 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.discovery.search;
/**
* Implementation of SearchContext for testing to ensure NPEs are not thrown and
* the context indicates the expected cancellation status.
*/
public class TestSearchContextImpl implements SearchContext {
private final boolean isCancelled;
public TestSearchContextImpl(boolean hasBeenCancelled) {
isCancelled = hasBeenCancelled;
}
@Override
public boolean searchIsCancelled() {
return isCancelled;
}
}

View File

@ -30,32 +30,35 @@ import javax.annotation.concurrent.Immutable;
public final class Manifest implements Serializable { public final class Manifest implements Serializable {
private static final long serialVersionUID = 1L; private static final long serialVersionUID = 1L;
private final String filePath; private final Path filePath;
private final Date dateFileCreated; private final Date dateFileCreated;
private final String caseName; private final String caseName;
private final String deviceId; private final String deviceId;
private final String dataSourcePath; private final Path dataSourcePath;
private final String dataSourceFileName;
private final Map<String, String> manifestProperties; private final Map<String, String> manifestProperties;
public Manifest(Path manifestFilePath, Date dateFileCreated, String caseName, String deviceId, Path dataSourcePath, Map<String, String> manifestProperties) { public Manifest(Path manifestFilePath, Date dateFileCreated, String caseName, String deviceId, Path dataSourcePath, Map<String, String> manifestProperties) {
this.filePath = manifestFilePath.toString(); this.filePath = Paths.get(manifestFilePath.toString());
this.dateFileCreated = dateFileCreated; this.dateFileCreated = new Date(dateFileCreated.getTime());
this.caseName = caseName; this.caseName = caseName;
this.deviceId = deviceId; this.deviceId = deviceId;
if (null != dataSourcePath) { if (null != dataSourcePath) {
this.dataSourcePath = dataSourcePath.toString(); this.dataSourcePath = Paths.get(dataSourcePath.toString());
dataSourceFileName = dataSourcePath.getFileName().toString();
} else { } else {
this.dataSourcePath = ""; this.dataSourcePath = Paths.get("");
dataSourceFileName = "";
} }
this.manifestProperties = new HashMap<>(manifestProperties); this.manifestProperties = new HashMap<>(manifestProperties);
} }
public Path getFilePath() { public Path getFilePath() {
return Paths.get(this.filePath); return this.filePath;
} }
public Date getDateFileCreated() { public Date getDateFileCreated() {
return new Date(this.dateFileCreated.getTime()); return dateFileCreated;
} }
public String getCaseName() { public String getCaseName() {
@ -67,11 +70,11 @@ public final class Manifest implements Serializable {
} }
public Path getDataSourcePath() { public Path getDataSourcePath() {
return Paths.get(dataSourcePath); return dataSourcePath;
} }
public String getDataSourceFileName() { public String getDataSourceFileName() {
return Paths.get(dataSourcePath).getFileName().toString(); return dataSourceFileName;
} }
public Map<String, String> getManifestProperties() { public Map<String, String> getManifestProperties() {

View File

@ -177,14 +177,6 @@ final class ChromeCacheExtractor {
currentCase = Case.getCurrentCaseThrows(); currentCase = Case.getCurrentCaseThrows();
fileManager = currentCase.getServices().getFileManager(); fileManager = currentCase.getServices().getFileManager();
// Create an output folder to save any derived files
absOutputFolderName = RAImageIngestModule.getRAOutputPath(currentCase, moduleName, context.getJobId());
relOutputFolderName = Paths.get(RAImageIngestModule.getRelModuleOutputPath(currentCase, moduleName, context.getJobId())).normalize().toString();
File dir = new File(absOutputFolderName);
if (dir.exists() == false) {
dir.mkdirs();
}
} catch (NoCurrentCaseException ex) { } catch (NoCurrentCaseException ex) {
String msg = "Failed to get current case."; //NON-NLS String msg = "Failed to get current case."; //NON-NLS
throw new IngestModuleException(msg, ex); throw new IngestModuleException(msg, ex);
@ -279,6 +271,17 @@ final class ChromeCacheExtractor {
// Identify each cache folder by searching for the index files in each // Identify each cache folder by searching for the index files in each
List<AbstractFile> indexFiles = findIndexFiles(); List<AbstractFile> indexFiles = findIndexFiles();
if (indexFiles.size() > 0) {
// Create an output folder to save any derived files
absOutputFolderName = RAImageIngestModule.getRAOutputPath(currentCase, moduleName, context.getJobId());
relOutputFolderName = Paths.get(RAImageIngestModule.getRelModuleOutputPath(currentCase, moduleName, context.getJobId())).normalize().toString();
File dir = new File(absOutputFolderName);
if (dir.exists() == false) {
dir.mkdirs();
}
}
// Process each of the cache folders // Process each of the cache folders
for (AbstractFile indexFile: indexFiles) { for (AbstractFile indexFile: indexFiles) {

View File

@ -76,29 +76,28 @@ With our connection in hand, we can do some queries. In our sample database, we
stmt = dbConn.createStatement() stmt = dbConn.createStatement()
resultSet = stmt.executeQuery("SELECT * FROM contacts")\endverbatim resultSet = stmt.executeQuery("SELECT * FROM contacts")\endverbatim
For each row, we are going to get the values for the name, e-mail, and phone number and make a TSK_CONTACT artifact. Recall from the first tutorial that posting artifacts to the blackboard allows modules to communicate with each other and also allows you to easily display data to the user. The TSK_CONTACT artifact is for storing contact information. For each row, we are going to get the values for the name, e-mail, and phone number and make a TSK_CONTACT artifact. Recall from the first tutorial that posting artifacts to the blackboard allows modules to communicate with each other and also allows you to easily display data to the user. The TSK_CONTACT artifact is for storing contact information. The <a href="http://sleuthkit.org/sleuthkit/docs/jni-docs/latest/artifact_catalog_page.html">artifact catalog</a> shows that TSK_CONTACT is a data artifact, so we will be using the newDataArtifact() method to create each one.
The basic approach in our example is to make an artifact of a given type (TSK_CONTACT) and have it be associated with the database it came from. We then make attributes for the name, email, and phone. The following code does this for each row in the database: The basic approach in our example is to make an artifact of a given type (TSK_CONTACT) and have it be associated with the database it came from. We then make attributes for the name, email, and phone. The following code does this for each row in the database:
\verbatim \verbatim
while resultSet.next(): while resultSet.next():
try:
# Make an artifact on the blackboard and give it attributes
art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT)
name = resultSet.getString("name") name = resultSet.getString("name")
art.addAttribute(BlackboardAttribute(
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME_PERSON.getTypeID(),
ContactsDbIngestModuleFactory.moduleName, name))
email = resultSet.getString("email") email = resultSet.getString("email")
art.addAttribute(BlackboardAttribute(
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_EMAIL.getTypeID(),
ContactsDbIngestModuleFactory.moduleName, email))
phone = resultSet.getString("phone") phone = resultSet.getString("phone")
art.addAttribute(BlackboardAttribute( except SQLException as e:
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER.getTypeID(), self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")")
ContactsDbIngestModuleFactory.moduleName, phone))\endverbatim
# Make an artifact on the blackboard, TSK_CONTACT and give it attributes for each of the fields
art = file.newDataArtifact(BlackboardArtifact.Type.TSK_CONTACT, Arrays.asList(
BlackboardAttribute(BlackboardAttribute.Type.TSK_NAME_PERSON,
ContactsDbIngestModuleFactory.moduleName, name),
BlackboardAttribute(BlackboardAttribute.Type.TSK_EMAIL,
ContactsDbIngestModuleFactory.moduleName, email),
BlackboardAttribute(BlackboardAttribute.Type.TSK_PHONE_NUMBER,
ContactsDbIngestModuleFactory.moduleName, phone)
))\endverbatim
That's it. We've just found the databases, queried them, and made artifacts for the user to see. There are some final things though. First, we should fire off an event so that the UI updates and refreshes with the new artifacts. We can fire just one event after each database is parsed (or you could fire one for each artifact - it's up to you). That's it. We've just found the databases, queried them, and made artifacts for the user to see. There are some final things though. First, we should fire off an event so that the UI updates and refreshes with the new artifacts. We can fire just one event after each database is parsed (or you could fire one for each artifact - it's up to you).
@ -113,6 +112,8 @@ stmt.close()
dbConn.close() dbConn.close()
os.remove(lclDbPath)\endverbatim os.remove(lclDbPath)\endverbatim
The final version of findContactsDb.py can be found on <a href="https://github.com/sleuthkit/autopsy/blob/develop/pythonExamples/Aug2015DataSourceTutorial/FindContactsDb.py">github</a>.
\subsection python_tutorial2_niceties Niceties \subsection python_tutorial2_niceties Niceties
Data source-level ingest modules can run for quite some time. Therefore, data source-level ingest modules should do some additional things that file-level ingest modules do not need to. Data source-level ingest modules can run for quite some time. Therefore, data source-level ingest modules should do some additional things that file-level ingest modules do not need to.

View File

@ -75,68 +75,56 @@ The process() method is passed in a reference to an AbstractFile Object. With th
Now that we have found the files, we want to do something with them. In our situation, we just want to alert the user to them. We do this by making an "Interesting Item" blackboard artifact. The <a href="https://sleuthkit.org/sleuthkit/docs/jni-docs/latest/mod_bbpage.html" target="_blank" rel="noopener noreferrer">Blackboard</a> is where ingest modules can communicate with each other and with the Autopsy GUI. The blackboard has a set of artifacts on it and each artifact:</p> Now that we have found the files, we want to do something with them. In our situation, we just want to alert the user to them. We do this by making an "Interesting Item" blackboard artifact. The <a href="https://sleuthkit.org/sleuthkit/docs/jni-docs/latest/mod_bbpage.html" target="_blank" rel="noopener noreferrer">Blackboard</a> is where ingest modules can communicate with each other and with the Autopsy GUI. The blackboard has a set of artifacts on it and each artifact:</p>
<ul> <ul>
<li>Has a type</li> <li>Has a type</li>
<li>Has a category</li>
<li>Is associated with a file</li> <li>Is associated with a file</li>
<li>Has one or more attributes. Attributes are simply name and value pairs.</li> <li>Has one or more attributes. Attributes are simply name and value pairs.</li>
</ul> </ul>
For our example, we are going to make an artifact of type "TSK_INTERESTING_FILE" whenever we find a big and round file. These are one of the most generic artifact types and are simply a way of alerting the user that a file is interesting for some reason. Once you make the artifact, it will be shown in the UI. The below code makes an artifact for the file and puts it into the set of "Big and Round Files". You can create whatever set names you want. The Autopsy GUI organizes Interesting Files by their set name. A list of standard artifact types can be found in the <a href="http://sleuthkit.org/sleuthkit/docs/jni-docs/latest/artifact_catalog_page.html">artifact catalog</a>. It is important to note the catagory for the artifact you want to since this affects which method you will use to create the artifact.
\verbatim
art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(),
FindBigRoundFilesIngestModuleFactory.moduleName, "Big and Round Files")
art.addAttribute(att)\endverbatim
The above code adds the artifact and a single attribute to the blackboard in the embedded database, but it does not notify other modules or the UI. The UI will eventually refresh, but it is faster to fire an event with this: For our example, we are going to make an artifact of type "TSK_INTERESTING_FILE", which is an analysis result, whenever we find a big and round file. These are one of the most generic artifact types and are simply a way of alerting the user that a file is interesting for some reason. Once you make the artifact, it will be shown in the UI. The below code makes an artifact for the file and puts it into the set of "Big and Round Files". You can create whatever set names you want. The Autopsy GUI organizes Interesting Files by their set name.
\verbatim \verbatim
IngestServices.getInstance().fireModuleDataEvent( art = file.newAnalysisResult(BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT, Score.SCORE_LIKELY_NOTABLE,
ModuleDataEvent(FindBigRoundFilesIngestModuleFactory.moduleName, None, "Big and Round Files", None,
BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, None))\endverbatim Arrays.asList(
BlackboardAttribute(BlackboardAttribute.Type.TSK_SET_NAME,
FindBigRoundFilesIngestModuleFactory.moduleName,
"Big and Round Files"))).getAnalysisResult()\endverbatim
The above code adds the artifact and a single attribute to the blackboard in the embedded database, but it does not notify other modules or the UI. Calling postArtifact() will let the tree viewer and other parts of the UI know that a refresh may be necessary, and passes the newly created artifacts to other modules that may do further processing on it.
\verbatim
blackboard.postArtifact(art, FindBigRoundFilesIngestModuleFactory.moduleName)\endverbatim
That's it. Your process() method should look something like this: That's it. Your process() method should look something like this:
\verbatim \verbatim
def process(self, file): def process(self, file):
# Use blackboard class to index blackboard artifacts for keyword search
blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard()
# Skip non-files # Skip non-files
if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) or if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) or
(file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) or (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) or
(file.isFile() == False)): (file.isFile() == False)):
return IngestModule.ProcessResult.OK return IngestModule.ProcessResult.OK
# Look for files bigger than 10MB that are a multiple of 4096 # Look for files bigger than 10MB that are a multiple of 4096
if ((file.getSize() > 10485760) and ((file.getSize() % 4096) == 0)):
if ((file.getSize() &gt; 10485760) and ((file.getSize() % 4096) == 0)):
# Make an artifact on the blackboard. TSK_INTERESTING_FILE_HIT is a generic type of # Make an artifact on the blackboard. TSK_INTERESTING_FILE_HIT is a generic type of
# artifact. Refer to the developer docs for other examples. # artifact. Refer to the developer docs for other examples.
art = file.newAnalysisResult(BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT, Score.SCORE_LIKELY_NOTABLE,
None, "Big and Round Files", None,
Arrays.asList(
BlackboardAttribute(BlackboardAttribute.Type.TSK_SET_NAME,
FindBigRoundFilesIngestModuleFactory.moduleName,
"Big and Round Files"))).getAnalysisResult()
art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT) try:
# post the artifact for listeners of artifact events
att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(), blackboard.postArtifact(art, FindBigRoundFilesIngestModuleFactory.moduleName)
except Blackboard.BlackboardException as e:
FindBigRoundFilesIngestModuleFactory.moduleName, "Big and Round Files") self.log(Level.SEVERE, "Error indexing artifact " + art.getDisplayName())
art.addAttribute(att)
# Fire an event to notify the UI and others that there is a new artifact
IngestServices.getInstance().fireModuleDataEvent(
ModuleDataEvent(FindBigRoundFilesIngestModuleFactory.moduleName,
BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, None))
return IngestModule.ProcessResult.OK\endverbatim return IngestModule.ProcessResult.OK\endverbatim
@ -144,6 +132,8 @@ Save this file and run the module on some of your data. If you have any big and
\image html bigAndRoundFiles.png \image html bigAndRoundFiles.png
The full big and round file module along with test data can be found on <a href="https://github.com/sleuthkit/autopsy/tree/develop/pythonExamples/July2015FileTutorial_BigRound">github</a>.
\subsection python_tutorial1_debug Debugging and Development Tips \subsection python_tutorial1_debug Debugging and Development Tips
Whenever you have syntax errors or other errors in your script, you will get some form of dialog from Autopsy when you try to run ingest modules. If that happens, fix the problem and run ingest modules again. You don't need to restart Autopsy each time! Whenever you have syntax errors or other errors in your script, you will get some form of dialog from Autopsy when you try to run ingest modules. If that happens, fix the problem and run ingest modules again. You don't need to restart Autopsy each time!

View File

@ -45,7 +45,7 @@ A third approach is to call org.sleuthkit.autopsy.casemodule.Case.getDataSources
\subsubsection python_tutorial3_getting_artifacts Getting Blackboard Artifacts \subsubsection python_tutorial3_getting_artifacts Getting Blackboard Artifacts
The blackboard is where modules store their analysis results. If you want to include them in your report, then there are several methods that you could use. If you want all artifacts of a given type, then you can use <a href="https://sleuthkit.org/sleuthkit/docs/jni-docs/latest/classorg_1_1sleuthkit_1_1datamodel_1_1_sleuthkit_case.html#a0b8396fac6c40d8291cc48732dd15d74">SleuthkitCase.getBlackboardArtifacts()</a>. There are many variations of this method that take different arguments. Look at them to find the one that is most convenient for you. The blackboard is where modules store their analysis results. If you want to include them in your report, then there are several methods that you could use. If you want all artifacts of a given type, then you can use <a href="http://sleuthkit.org/sleuthkit/docs/jni-docs/latest//classorg_1_1sleuthkit_1_1datamodel_1_1_blackboard.html#af7261eb61cd05a4d457910eed599dd54">getDataArtifacts()</a>or <a href="http://sleuthkit.org/sleuthkit/docs/jni-docs/latest//classorg_1_1sleuthkit_1_1datamodel_1_1_blackboard.html#a563cbd08810a1b31ef2ecf0ebf0b7356">Blackboard.getAnalysisResultsByType()</a>. There are variations of these methods that take different arguments. Look at them to find the one that is most convenient for you.
\subsubsection python_tutorial3_getting_tags Getting Tagged Files or Artifacts \subsubsection python_tutorial3_getting_tags Getting Tagged Files or Artifacts