Merge branch 'sleuthkit:develop' into develop

Seb2lyon 2021-08-28 12:19:20 +02:00 committed by GitHub
commit 6cde752006
43 changed files with 781 additions and 331 deletions

View File

@@ -87,27 +87,10 @@ public final class OtherOccurrences {
if (osAccountAddr.isPresent()) {
try {
for (OsAccountInstance instance : osAccount.getOsAccountInstances()) {
DataSource osAccountDataSource = instance.getDataSource();
try {
CorrelationCase correlationCase = CentralRepository.getInstance().getCase(Case.getCurrentCaseThrows());
CorrelationAttributeInstance correlationAttributeInstance = new CorrelationAttributeInstance(
CentralRepository.getInstance().getCorrelationTypeById(CorrelationAttributeInstance.OSACCOUNT_TYPE_ID),
osAccountAddr.get(),
correlationCase,
CorrelationDataSource.fromTSKDataSource(correlationCase, instance.getDataSource()),
"",
"",
TskData.FileKnown.KNOWN,
osAccount.getId());
for (OsAccountInstance instance : osAccount.getOsAccountInstances()) {
CorrelationAttributeInstance correlationAttributeInstance = CorrelationAttributeUtil.makeCorrAttr(instance.getOsAccount(), instance.getDataSource());
if (correlationAttributeInstance != null) {
ret.add(correlationAttributeInstance);
} catch (CentralRepoException ex) {
logger.log(Level.SEVERE, String.format("Cannot get central repository for OsAccount: %s.", osAccountAddr.get()), ex); //NON-NLS
} catch (NoCurrentCaseException ex) {
logger.log(Level.WARNING, String.format("Exception while getting open case looking up osAccount %s.", osAccountAddr.get()), ex); //NON-NLS
} catch (CorrelationAttributeNormalizationException ex) {
logger.log(Level.SEVERE, String.format("Exception with Correlation Attribute Normalization for osAccount %s.", osAccountAddr.get()), ex); //NON-NLS
}
}
} catch (TskCoreException ex) {

View File

@@ -40,6 +40,8 @@ import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataArtifact;
import org.sleuthkit.datamodel.HashUtility;
import org.sleuthkit.datamodel.InvalidAccountIDException;
import org.sleuthkit.datamodel.OsAccount;
import org.sleuthkit.datamodel.OsAccountInstance;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
@@ -568,6 +570,51 @@ public class CorrelationAttributeUtil {
}
}
/**
* Makes a correlation attribute instance from an OS account. Returns null
* if the account address is absent or is one of the well-known SIDs that
* are always present on a Windows system and are therefore not unique.
*
* @param osAccount The OS account.
* @param dataSource The data source content object.
*
* @return The correlation attribute instance, or null if the account was
* skipped or an error occurred.
*/
public static CorrelationAttributeInstance makeCorrAttr(OsAccount osAccount, Content dataSource) {
Optional<String> accountAddr = osAccount.getAddr();
// Ignore the account if its address is absent or is one of the well-known SIDs below, since those
// are always present on a Windows system and are not unique
if (!accountAddr.isPresent() || accountAddr.get().equals("S-1-5-18") || accountAddr.get().equals("S-1-5-19") || accountAddr.get().equals("S-1-5-20")) {
return null;
}
try {
CorrelationCase correlationCase = CentralRepository.getInstance().getCase(Case.getCurrentCaseThrows());
CorrelationAttributeInstance correlationAttributeInstance = new CorrelationAttributeInstance(
CentralRepository.getInstance().getCorrelationTypeById(CorrelationAttributeInstance.OSACCOUNT_TYPE_ID),
accountAddr.get(),
correlationCase,
CorrelationDataSource.fromTSKDataSource(correlationCase, dataSource),
"",
"",
TskData.FileKnown.KNOWN,
osAccount.getId());
return correlationAttributeInstance;
} catch (CentralRepoException ex) {
logger.log(Level.SEVERE, String.format("Cannot get central repository for OsAccount: %s.", accountAddr.get()), ex); //NON-NLS
return null;
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
return null;
} catch (CorrelationAttributeNormalizationException ex) {
logger.log(Level.SEVERE, "Exception with Correlation Attribute Normalization.", ex); //NON-NLS
return null;
}
}
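For reference, the three skipped addresses are the well-known Windows service SIDs S-1-5-18 (LocalSystem), S-1-5-19 (LocalService), and S-1-5-20 (NetworkService). A minimal hedged sketch of how a caller might drive the new helper, assuming an osAccount in scope; variable names are illustrative and TskCoreException handling is omitted:
// Sketch: collect a correlation attribute for each instance of an OS account.
// makeCorrAttr() returns null when the account should be skipped (absent
// address or well-known SID) or when an error occurred.
List<CorrelationAttributeInstance> corrAttrs = new ArrayList<>();
for (OsAccountInstance instance : osAccount.getOsAccountInstances()) {
    CorrelationAttributeInstance corrAttr = CorrelationAttributeUtil.makeCorrAttr(instance.getOsAccount(), instance.getDataSource());
    if (corrAttr != null) {
        corrAttrs.add(corrAttr);
    }
}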
/**
* Gets the correlation attribute instance for a file.
*

View File

@@ -661,7 +661,8 @@ public final class CaseEventListener implements PropertyChangeListener {
"CaseEventsListener.prevCaseComment.text=Users seen in previous cases",
"CaseEventsListener.prevExists.text=Previously Seen Users (Central Repository)"})
/**
* Add OsAccount Instance to CR and find interesting items based on the OsAccount
* Add OsAccount Instance to CR and find interesting items based on the
* OsAccount
*/
private final class OsAccountInstancesAddedTask implements Runnable {
@@ -677,7 +678,7 @@ public final class CaseEventListener implements PropertyChangeListener {
@Override
public void run() {
//Nothing to do here if the central repo is not enabled or if ingest is running but is set to not save data/make artifacts
if (!CentralRepository.isEnabled()
if (!CentralRepository.isEnabled()
|| (IngestManager.getInstance().isIngestRunning() && !(IngestEventsListener.isFlagSeenDevices() || IngestEventsListener.shouldCreateCrProperties()))) {
return;
}
@@ -687,27 +688,15 @@
for (OsAccountInstance osAccountInstance : addedOsAccountNew) {
try {
OsAccount osAccount = osAccountInstance.getOsAccount();
Optional<String> accountAddr = osAccount.getAddr();
// Check address if it is null or one of the ones below we want to ignore it since they will always be one a windows system
// and they are not unique
if (!accountAddr.isPresent() || accountAddr.get().equals("S-1-5-18") || accountAddr.get().equals("S-1-5-19") || accountAddr.get().equals("S-1-5-20")) {
CorrelationAttributeInstance correlationAttributeInstance = CorrelationAttributeUtil.makeCorrAttr(osAccount, osAccountInstance.getDataSource());
if (correlationAttributeInstance == null) {
return;
}
Optional<String> accountAddr = osAccount.getAddr();
try {
CorrelationCase correlationCase = CentralRepository.getInstance().getCase(Case.getCurrentCaseThrows());
CorrelationAttributeInstance correlationAttributeInstance = new CorrelationAttributeInstance(
CentralRepository.getInstance().getCorrelationTypeById(CorrelationAttributeInstance.OSACCOUNT_TYPE_ID),
accountAddr.get(),
correlationCase,
CorrelationDataSource.fromTSKDataSource(correlationCase, osAccountInstance.getDataSource()),
"",
"",
TskData.FileKnown.KNOWN,
osAccount.getId());
// Save to the database if requested
if(IngestEventsListener.shouldCreateCrProperties()) {
if (IngestEventsListener.shouldCreateCrProperties()) {
dbManager.addArtifactInstance(correlationAttributeInstance);
}
@@ -740,14 +729,11 @@
}
}
} catch (CentralRepoException ex) {
LOGGER.log(Level.SEVERE, String.format("Cannot get central repository for OsAccount: %s.", accountAddr.get()), ex); //NON-NLS
} catch (NoCurrentCaseException ex) {
LOGGER.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
} catch (CorrelationAttributeNormalizationException ex) {
LOGGER.log(Level.SEVERE, "Exception with Correlation Attribute Normalization.", ex); //NON-NLS
} catch (CentralRepoException ex) {
LOGGER.log(Level.SEVERE, String.format("Cannot get central repository for OsAccount: %s.", accountAddr.get()), ex); //NON-NLS
}
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "Cannot get central repository for OsAccount: " + "OsAccount", ex);
}

View File

@@ -40,18 +40,17 @@ import org.sleuthkit.datamodel.TskCoreException;
*/
@ServiceProvider(service = DataContentViewer.class, position = 7)
public class AnalysisResultsContentViewer implements DataContentViewer {
private static final Logger logger = Logger.getLogger(AnalysisResultsContentPanel.class.getName());
// isPreferred value
private static final int PREFERRED_VALUE = 3;
private final AnalysisResultsViewModel viewModel = new AnalysisResultsViewModel();
private final AnalysisResultsContentPanel panel = new AnalysisResultsContentPanel();
private SwingWorker<?, ?> worker = null;
@NbBundle.Messages({
"AnalysisResultsContentViewer_title=Analysis Results"
})
@@ -135,11 +134,11 @@ public class AnalysisResultsContentViewer implements DataContentViewer {
if (content instanceof AnalysisResult) {
return true;
}
if (content == null || content instanceof BlackboardArtifact) {
continue;
}
try {
if (Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard().hasAnalysisResults(content.getId())) {
return true;
@@ -148,7 +147,7 @@ public class AnalysisResultsContentViewer implements DataContentViewer {
logger.log(Level.SEVERE, "Unable to get analysis results for file with obj id " + content.getId(), ex);
}
}
return false;
}

View File

@@ -252,7 +252,7 @@ public class AnalysisResultsViewModel {
try {
nodeContent = Optional.of(content);
// get the aggregate score of that content
aggregateScore = Optional.ofNullable(content.getAggregateScore());

View File

@@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2012-2019 Basis Technology Corp.
* Copyright 2012-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -110,7 +110,7 @@ public class ImageNode extends AbstractContentNode<Image> {
actionsList.add(a);
}
actionsList.addAll(ExplorerNodeActionVisitor.getActions(content));
actionsList.add(new FileSearchAction(Bundle.ImageNode_getActions_openFileSearchByAttr_text()));
actionsList.add(new FileSearchAction(Bundle.ImageNode_getActions_openFileSearchByAttr_text(), content.getId()));
actionsList.add(new ViewSummaryInformationAction(content.getId()));
actionsList.add(new RunIngestModulesAction(Collections.<Content>singletonList(content)));
actionsList.add(new NewWindowViewAction(NbBundle.getMessage(this.getClass(), "ImageNode.getActions.viewInNewWin.text"), this));

View File

@@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2017-2019 Basis Technology Corp.
* Copyright 2017-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -65,7 +65,7 @@ public abstract class SpecialDirectoryNode extends AbstractAbstractFileNode<Spec
actions.add(ExtractAction.getInstance());
actions.add(ExportCSVAction.getInstance());
actions.add(null); // creates a menu separator
actions.add(new FileSearchAction(Bundle.ImageNode_getActions_openFileSearchByAttr_text()));
actions.add(new FileSearchAction(Bundle.ImageNode_getActions_openFileSearchByAttr_text(), content.getId()));
if (content.isDataSource()) {
actions.add(new ViewSummaryInformationAction(content.getId()));
actions.add(new RunIngestModulesAction(Collections.<Content>singletonList(content)));

View File

@@ -1,15 +1,15 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011 Basis Technology Corp.
*
* Copyright 2011-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -28,14 +28,26 @@ import org.openide.util.Lookup;
*/
public class FileSearchAction extends AbstractAction {
private final Long dataSourceId;
public FileSearchAction(String title, long dataSourceID) {
super(title);
dataSourceId = dataSourceID;
}
public FileSearchAction(String title) {
super(title);
dataSourceId = null;
}
@Override
public void actionPerformed(ActionEvent e) {
FileSearchProvider searcher = Lookup.getDefault().lookup(FileSearchProvider.class);
searcher.showDialog();
if (dataSourceId == null) {
searcher.showDialog();
} else {
searcher.showDialog(dataSourceId);
}
}
}

View File

@@ -1,15 +1,15 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011 Basis Technology Corp.
*
* Copyright 2011-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -23,5 +23,8 @@ package org.sleuthkit.autopsy.directorytree;
*/
public interface FileSearchProvider {
public void showDialog(Long dataSourceID);
@Deprecated
public void showDialog();
}
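With the new overload above, an implementer supplies the data-source-scoped dialog and keeps the deprecated zero-argument form as a fallback. A hedged sketch of one possible implementer; DialogFileSearchProvider and FileSearchDialog are hypothetical names, not code from this commit:
package org.sleuthkit.autopsy.directorytree;
// Illustrative implementer of the updated interface. A null dataSourceID
// means the search is not restricted to a single data source.
public class DialogFileSearchProvider implements FileSearchProvider {

    @Override
    public void showDialog(Long dataSourceID) {
        FileSearchDialog dialog = new FileSearchDialog(dataSourceID); // hypothetical dialog class
        dialog.setVisible(true);
    }

    @Deprecated
    @Override
    public void showDialog() {
        showDialog(null); // fall back to an unscoped search
    }
}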

View File

@@ -1,7 +1,7 @@
/*
* Autopsy
*
* Copyright 2020 Basis Technology Corp.
* Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -54,14 +54,18 @@ public abstract class AbstractFilter {
* @param caseDb The case database
* @param centralRepoDb The central repo database. Can be null if the
* filter does not require it.
* @param context The SearchContext of the search that is applying this filter.
*
* @return The list of results that match this filter (and any that came
* before it)
*
* @throws DiscoveryException
* @throws SearchCancellationException Thrown when the user has cancelled
* the search.
*/
public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException {
CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
return new ArrayList<>();
}
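Every alternate filter now receives a SearchContext and is expected to poll it between results, bailing out with SearchCancellationException. A hedged sketch of a custom alternate filter under the new signature; the filter class and its keep() predicate are hypothetical:
package org.sleuthkit.autopsy.discovery.search;

import java.util.ArrayList;
import java.util.List;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository;
import org.sleuthkit.datamodel.SleuthkitCase;

// Hypothetical alternate filter illustrating the new cancellation-aware signature.
public final class ExampleAlternateFilter extends AbstractFilter {

    @Override
    public String getWhereClause() {
        throw new UnsupportedOperationException("Not supported, this is an alternative filter.");
    }

    @Override
    public boolean useAlternateFilter() {
        return true; // run in the in-memory filtering pass instead of SQL
    }

    @Override
    public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb,
            CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
        List<Result> filteredResults = new ArrayList<>();
        for (Result result : currentResults) {
            if (context.searchIsCancelled()) {
                throw new SearchCancellationException("The search was cancelled while the example filter was being applied.");
            }
            if (keep(result)) {
                filteredResults.add(result);
            }
        }
        return filteredResults;
    }

    private boolean keep(Result result) {
        return true; // placeholder predicate
    }

    @Override
    public String getDesc() {
        return "Example filter"; // placeholder description
    }
}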

View File

@@ -1,7 +1,7 @@
/*
* Autopsy
*
* Copyright 2020 Basis Technology Corp.
* Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -78,10 +78,14 @@ public class DiscoveryAttributes {
* @param caseDb The case database.
* @param centralRepoDb The central repository database. Can be null if
* not needed.
* @param context The SearchContext of the search that is applying this filter.
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has
* cancelled the search.
*/
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException {
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Default is to do nothing
}
}
@@ -154,10 +158,13 @@ public class DiscoveryAttributes {
@Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException {
CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
try {
Map<String, Set<String>> domainsToCategories = getDomainsWithWebCategories(caseDb);
Map<String, Set<String>> domainsToCategories = getDomainsWithWebCategories(caseDb, context);
for (Result result : results) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Domain Category Attribute was being added.");
}
if (result instanceof ResultDomain) {
ResultDomain domain = (ResultDomain) result;
domain.addWebCategories(domainsToCategories.get(domain.getDomain()));
@@ -172,14 +179,29 @@ public class DiscoveryAttributes {
* Loads all TSK_WEB_CATEGORY artifacts and maps the domain attribute to
* the category name attribute. Each ResultDomain is then parsed and
* matched against this map of values.
*
* @param caseDb The case database.
* @param context The SearchContext of the search that is applying this filter.
*
* @return domainToCategory - A map of the domain names to the category
* name attribute they are classified as.
*
* @throws TskCoreException
* @throws InterruptedException
* @throws SearchCancellationException - Thrown when the user has
* cancelled the search.
*/
private Map<String, Set<String>> getDomainsWithWebCategories(SleuthkitCase caseDb) throws TskCoreException, InterruptedException {
private Map<String, Set<String>> getDomainsWithWebCategories(SleuthkitCase caseDb, SearchContext context) throws TskCoreException, InterruptedException, SearchCancellationException {
Map<String, Set<String>> domainToCategory = new HashMap<>();
for (BlackboardArtifact artifact : caseDb.getBlackboardArtifacts(TSK_WEB_CATEGORIZATION)) {
if (Thread.currentThread().isInterrupted()) {
throw new InterruptedException();
}
if (context.searchIsCancelled()) {
throw new SearchCancellationException("Search was cancelled while getting domains for artifact type: " + artifact.getDisplayName());
}
BlackboardAttribute webCategory = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME));
BlackboardAttribute domain = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN));
if (webCategory != null && domain != null) {
@@ -206,14 +228,16 @@ public class DiscoveryAttributes {
@Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException {
CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Get pairs of (object ID, keyword list name) for all files in the list of files that have
// keyword list hits.
String selectQuery = createSetNameClause(results, BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID(),
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID());
SetKeywordListNamesCallback callback = new SetKeywordListNamesCallback(results);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Keyword List Attribute was being added.");
}
try {
caseDb.getCaseDbAccessManager().select(selectQuery, callback);
} catch (TskCoreException ex) {
@@ -278,8 +302,20 @@ public class DiscoveryAttributes {
* Example: query for notable status of google.com. Result: notable With
* this map, all domain instances that represent google.com can be updated
* after one simple lookup.
*
* @param domainsBatch The list of ResultDomains to organize.
* @param attributeType The type of correlation attribute being organized.
* @param context The SearchContext of the search that is applying this filter.
*
* @return resultDomainTable - A map of the normalized domain name to the
* list of ResultDomain objects which are part of that normalized
* domain.
*
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/
private static Map<String, List<ResultDomain>> organizeByValue(List<ResultDomain> domainsBatch, CorrelationAttributeInstance.Type attributeType) {
private static Map<String, List<ResultDomain>> organizeByValue(List<ResultDomain> domainsBatch, CorrelationAttributeInstance.Type attributeType, SearchContext context) throws SearchCancellationException {
final Map<String, List<ResultDomain>> resultDomainTable = new HashMap<>();
for (ResultDomain domainInstance : domainsBatch) {
try {
@@ -288,6 +324,9 @@ public class DiscoveryAttributes {
final List<ResultDomain> bucket = resultDomainTable.getOrDefault(normalizedDomain, new ArrayList<>());
bucket.add(domainInstance);
resultDomainTable.put(normalizedDomain, bucket);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("Search was cancelled while organizing domains by their normalized value.");
}
} catch (CorrelationAttributeNormalizationException ex) {
logger.log(Level.INFO, String.format("Domain [%s] failed normalization, skipping...", domainInstance.getDomain()));
}
@@ -322,39 +361,73 @@ public class DiscoveryAttributes {
@Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException {
CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
if (centralRepoDb != null) {
processFilesWithCr(results, centralRepoDb);
processFilesWithCr(results, centralRepoDb, context);
}
}
private void processFilesWithCr(List<Result> results, CentralRepository centralRepo) throws DiscoveryException {
/**
* Helper method to batch the domain results and check for notability.
*
* @param results The results to check for having previously been marked notable in the CR.
* @param centralRepo The central repository being used to check for
* notability.
* @param context The SearchContext of the search that is applying this filter.
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has
* cancelled the search.
*/
private void processFilesWithCr(List<Result> results, CentralRepository centralRepo, SearchContext context) throws DiscoveryException, SearchCancellationException {
List<ResultDomain> domainsBatch = new ArrayList<>();
for (Result result : results) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Previously Notable attribute was being calculated with the CR.");
}
if (result.getType() == SearchData.Type.DOMAIN) {
domainsBatch.add((ResultDomain) result);
if (domainsBatch.size() == DOMAIN_BATCH_SIZE) {
queryPreviouslyNotable(domainsBatch, centralRepo);
queryPreviouslyNotable(domainsBatch, centralRepo, context);
domainsBatch.clear();
}
}
}
queryPreviouslyNotable(domainsBatch, centralRepo);
queryPreviouslyNotable(domainsBatch, centralRepo, context);
}
private void queryPreviouslyNotable(List<ResultDomain> domainsBatch, CentralRepository centralRepo) throws DiscoveryException {
/**
* Helper method to check a batch of domains for notability.
*
* @param domainsBatch The list of ResultDomains to check for
* notability.
* @param centralRepo The central repository being used to check for
* notability.
* @param context The SearchContext of the search that is applying this filter.
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has
* cancelled the search.
*/
private void queryPreviouslyNotable(List<ResultDomain> domainsBatch, CentralRepository centralRepo, SearchContext context) throws DiscoveryException, SearchCancellationException {
if (domainsBatch.isEmpty()) {
return;
}
try {
final CorrelationAttributeInstance.Type attributeType = centralRepo.getCorrelationTypeById(CorrelationAttributeInstance.DOMAIN_TYPE_ID);
final Map<String, List<ResultDomain>> resultDomainTable = organizeByValue(domainsBatch, attributeType);
final Map<String, List<ResultDomain>> resultDomainTable = organizeByValue(domainsBatch, attributeType, context);
final String values = createCSV(resultDomainTable.keySet());
if (context.searchIsCancelled()) {
throw new SearchCancellationException("Search was cancelled while checking for previously notable domains.");
}
final String tableName = CentralRepoDbUtil.correlationTypeToInstanceTableName(attributeType);
final String domainFrequencyQuery = " value AS domain_name "
+ "FROM " + tableName + " "
@@ -421,7 +494,7 @@ public class DiscoveryAttributes {
@Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException {
CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
if (centralRepoDb == null) {
for (Result result : results) {
if (result.getFrequency() == SearchData.Frequency.UNKNOWN && result.getKnown() == TskData.FileKnown.KNOWN) {
@@ -429,7 +502,7 @@ public class DiscoveryAttributes {
}
}
} else {
processResultFilesForCR(results, centralRepoDb);
processResultFilesForCR(results, centralRepoDb, context);
}
}
@@ -437,16 +510,26 @@ public class DiscoveryAttributes {
* Private helper method for adding Frequency attribute when CR is
* enabled.
*
* @param files The list of ResultFiles to caluclate frequency
* for.
* @param centralRepoDb The central repository currently in use.
* @param results The results whose frequency is being checked.
* @param centralRepoDb The central repository being used to check
* frequency.
* @param context The SearchContext of the search that is applying this filter.
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has
* cancelled the search.
*/
private void processResultFilesForCR(List<Result> results,
CentralRepository centralRepoDb) throws DiscoveryException {
CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
List<ResultFile> currentFiles = new ArrayList<>();
Set<String> hashesToLookUp = new HashSet<>();
List<ResultDomain> domainsToQuery = new ArrayList<>();
for (Result result : results) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Frequency attribute was being calculated with the CR.");
}
// If frequency was already calculated, skip...
if (result.getFrequency() == SearchData.Frequency.UNKNOWN) {
if (result.getKnown() == TskData.FileKnown.KNOWN) {
@@ -462,7 +545,7 @@ public class DiscoveryAttributes {
}
if (hashesToLookUp.size() >= BATCH_SIZE) {
computeFrequency(hashesToLookUp, currentFiles, centralRepoDb);
computeFrequency(hashesToLookUp, currentFiles, centralRepoDb, context);
hashesToLookUp.clear();
currentFiles.clear();
@@ -470,16 +553,15 @@ public class DiscoveryAttributes {
} else {
domainsToQuery.add((ResultDomain) result);
if (domainsToQuery.size() == DOMAIN_BATCH_SIZE) {
queryDomainFrequency(domainsToQuery, centralRepoDb);
queryDomainFrequency(domainsToQuery, centralRepoDb, context);
domainsToQuery.clear();
}
}
}
}
queryDomainFrequency(domainsToQuery, centralRepoDb);
computeFrequency(hashesToLookUp, currentFiles, centralRepoDb);
queryDomainFrequency(domainsToQuery, centralRepoDb, context);
computeFrequency(hashesToLookUp, currentFiles, centralRepoDb, context);
}
}
@@ -487,17 +569,22 @@ public class DiscoveryAttributes {
* Query to get the frequency of a domain.
*
* @param domainsToQuery List of domains to check the frequency of.
* @param centralRepository The central repository to query.
* @param centralRepository The central repository being used to check
* frequency.
* @param context The SearchContext of the search that is applying this filter.
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/
private static void queryDomainFrequency(List<ResultDomain> domainsToQuery, CentralRepository centralRepository) throws DiscoveryException {
private static void queryDomainFrequency(List<ResultDomain> domainsToQuery, CentralRepository centralRepository, SearchContext context) throws DiscoveryException, SearchCancellationException {
if (domainsToQuery.isEmpty()) {
return;
}
try {
final CorrelationAttributeInstance.Type attributeType = centralRepository.getCorrelationTypeById(CorrelationAttributeInstance.DOMAIN_TYPE_ID);
final Map<String, List<ResultDomain>> resultDomainTable = organizeByValue(domainsToQuery, attributeType);
final Map<String, List<ResultDomain>> resultDomainTable = organizeByValue(domainsToQuery, attributeType, context);
final String values = createCSV(resultDomainTable.keySet());
final String tableName = CentralRepoDbUtil.correlationTypeToInstanceTableName(attributeType);
final String domainFrequencyQuery = " value AS domain_name, COUNT(value) AS frequency FROM"
@@ -508,8 +595,11 @@ public class DiscoveryAttributes {
+ ")) AS foo GROUP BY value";
final DomainFrequencyCallback frequencyCallback = new DomainFrequencyCallback(resultDomainTable);
centralRepository.processSelectClause(domainFrequencyQuery, frequencyCallback);
centralRepository.processSelectClause(domainFrequencyQuery, frequencyCallback);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Domain frequency was being queried with the CR.");
}
if (frequencyCallback.getCause() != null) {
throw frequencyCallback.getCause();
}
@@ -620,7 +710,7 @@ public class DiscoveryAttributes {
@Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException {
CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Get pairs of (object ID, hash set name) for all files in the list of files that have
// hash set hits.
@@ -628,6 +718,9 @@ public class DiscoveryAttributes {
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID());
HashSetNamesCallback callback = new HashSetNamesCallback(results);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Hash Hit attribute was being added.");
}
try {
caseDb.getCaseDbAccessManager().select(selectQuery, callback);
} catch (TskCoreException ex) {
@@ -695,7 +788,7 @@ public class DiscoveryAttributes {
@Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException {
CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Get pairs of (object ID, interesting item set name) for all files in the list of files that have
// interesting file set hits.
@@ -703,6 +796,9 @@ public class DiscoveryAttributes {
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID());
InterestingFileSetNamesCallback callback = new InterestingFileSetNamesCallback(results);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Interesting Item attribute was being added.");
}
try {
caseDb.getCaseDbAccessManager().select(selectQuery, callback);
} catch (TskCoreException ex) {
@@ -808,7 +904,7 @@ public class DiscoveryAttributes {
@Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException {
CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Get pairs of (object ID, object type name) for all files in the list of files that have
// objects detected
@@ -816,6 +912,9 @@ public class DiscoveryAttributes {
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION.getTypeID());
ObjectDetectedNamesCallback callback = new ObjectDetectedNamesCallback(results);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Object Detected attribute was being added.");
}
try {
caseDb.getCaseDbAccessManager().select(selectQuery, callback);
} catch (TskCoreException ex) {
@@ -884,10 +983,13 @@ public class DiscoveryAttributes {
@Override
public void addAttributeToResults(List<Result> results, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException {
CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
try {
for (Result result : results) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while File Tag attribute was being added.");
}
if (result.getType() == SearchData.Type.DOMAIN) {
return;
}
@@ -995,14 +1097,20 @@ public class DiscoveryAttributes {
}
/**
*
* Computes the CR frequency of all the given hashes and updates the list of
* files.
*
* @param hashesToLookUp Hashes to find the frequency of.
* @param currentFiles List of files to update with frequencies.
* @param centralRepoDb The central repository being used.
* @param context The SearchContext of the search that is applying this filter.
*
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/
private static void computeFrequency(Set<String> hashesToLookUp, List<ResultFile> currentFiles, CentralRepository centralRepoDb) {
private static void computeFrequency(Set<String> hashesToLookUp, List<ResultFile> currentFiles, CentralRepository centralRepoDb, SearchContext context) throws SearchCancellationException {
if (hashesToLookUp.isEmpty()) {
return;
@@ -1022,7 +1130,9 @@ public class DiscoveryAttributes {
FrequencyCallback callback = new FrequencyCallback(currentFiles);
centralRepoDb.processSelectClause(selectClause, callback);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while file frequency was being queried with the CR.");
}
} catch (CentralRepoException ex) {
logger.log(Level.WARNING, "Error getting frequency counts from Central Repository", ex); // NON-NLS
}
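The CR-backed attributes above share one batch-and-flush shape: accumulate domains until a batch fills, make one CR round trip per batch, and always flush the final partial batch, polling the context between items. A condensed, hedged restatement of that loop; handleBatch is a hypothetical stand-in for queryPreviouslyNotable or queryDomainFrequency:
// Sketch of the batching pattern used by the CR-backed attributes.
List<ResultDomain> domainsBatch = new ArrayList<>();
for (Result result : results) {
    if (context.searchIsCancelled()) {
        throw new SearchCancellationException("The search was cancelled while domains were being batched.");
    }
    if (result.getType() == SearchData.Type.DOMAIN) {
        domainsBatch.add((ResultDomain) result);
        if (domainsBatch.size() == DOMAIN_BATCH_SIZE) {
            handleBatch(domainsBatch, centralRepo, context); // one CR round trip per full batch
            domainsBatch.clear();
        }
    }
}
handleBatch(domainsBatch, centralRepo, context); // flush the remainder (a no-op when empty)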

View File

@@ -59,6 +59,7 @@ public class DiscoveryKeyUtils {
private final List<AbstractFilter> filters;
private final SleuthkitCase sleuthkitCase;
private final CentralRepository centralRepository;
private final SearchContext context;
/**
* Construct a new SearchKey with all information that defines a search.
@@ -70,16 +71,20 @@ public class DiscoveryKeyUtils {
* @param sortingMethod The method to sort the results by.
* @param sleuthkitCase The SleuthkitCase being searched.
* @param centralRepository The Central Repository being searched.
* @param context The SearchContext which reflects the search
* being performed to get results for this
* key.
*/
SearchKey(String userName, List<AbstractFilter> filters,
DiscoveryAttributes.AttributeType groupAttributeType,
Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod sortingMethod,
SleuthkitCase sleuthkitCase, CentralRepository centralRepository) {
SleuthkitCase sleuthkitCase, CentralRepository centralRepository, SearchContext context) {
this.groupAttributeType = groupAttributeType;
this.groupSortingType = groupSortingType;
this.sortingMethod = sortingMethod;
this.filters = filters;
this.context = context;
StringBuilder searchStringBuilder = new StringBuilder();
searchStringBuilder.append(userName);
@@ -93,8 +98,8 @@ public class DiscoveryKeyUtils {
}
/**
* Construct a SearchKey without a SleuthkitCase or CentralRepositry
* instance.
* Construct a SearchKey without a SearchContext, SleuthkitCase, or
* CentralRepository instance.
*
* @param userName The name of the user performing the search.
* @param filters The Filters being used for the search.
@@ -107,7 +112,8 @@ public class DiscoveryKeyUtils {
Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod sortingMethod) {
this(userName, filters, groupAttributeType, groupSortingType,
sortingMethod, null, null);
sortingMethod, null, null, null);
// This constructor should only be used for putting keys directly into a map or checking if one is present, since the case DB, CR, and search context will be null.
}
@Override
@@ -141,6 +147,23 @@ public class DiscoveryKeyUtils {
return hash;
}
/**
* Get the SearchContext for the search this key is being used in.
*
* @return The SearchContext the search key is being used in.
*
* @throws DiscoveryException Thrown when the key being used has a null
* context indicating it was not created with
* knowledge of the case or central
* repository databases.
*/
SearchContext getContext() throws DiscoveryException {
if (context == null) {
throw new DiscoveryException("The key in use was created without a context and does not support retrieving information from the databases.");
}
return context;
}
/**
* Get the String representation of this key.
*

View File

@@ -78,24 +78,31 @@ public class DomainSearch {
* @param caseDb The case database.
* @param centralRepoDb The central repository database. Can be null
* if not needed.
* @param context The SearchContext the search is being performed from.
*
* @return A LinkedHashMap grouped and sorted according to the parameters.
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/
public Map<GroupKey, Integer> getGroupSizes(String userName,
List<AbstractFilter> filters,
DiscoveryAttributes.AttributeType groupAttributeType,
Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod domainSortingMethod,
SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException {
SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
final Map<GroupKey, List<Result>> searchResults = searchCache.get(
userName, filters, groupAttributeType, groupSortingType,
domainSortingMethod, caseDb, centralRepoDb);
domainSortingMethod, caseDb, centralRepoDb, context);
// Transform the cached results into a map of group key to group size.
final LinkedHashMap<GroupKey, Integer> groupSizes = new LinkedHashMap<>();
for (GroupKey groupKey : searchResults.keySet()) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled before group sizes were finished being calculated");
}
groupSizes.put(groupKey, searchResults.get(groupKey).size());
}
@@ -130,11 +137,11 @@ public class DomainSearch {
Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod domainSortingMethod,
GroupKey groupKey, int startingEntry, int numberOfEntries,
SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException {
SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
final Map<GroupKey, List<Result>> searchResults = searchCache.get(
userName, filters, groupAttributeType, groupSortingType,
domainSortingMethod, caseDb, centralRepoDb);
domainSortingMethod, caseDb, centralRepoDb, context);
final List<Result> domainsInGroup = searchResults.get(groupKey);
final List<Result> page = new ArrayList<>();
for (int i = startingEntry; (i < startingEntry + numberOfEntries)

View File

@@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2020 Basis Technology Corp.
* Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -55,20 +55,24 @@ class DomainSearchCache {
* @param caseDb The case database.
* @param centralRepoDb The central repository database. Can be null if
* not needed.
* @param context The SearchContext the search is being performed
* from.
*
* @return Domain search results matching the given parameters.
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/
Map<GroupKey, List<Result>> get(String userName,
List<AbstractFilter> filters,
DiscoveryAttributes.AttributeType groupAttributeType,
Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod domainSortingMethod,
SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException {
SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
try {
final SearchKey searchKey = new SearchKey(userName, filters, groupAttributeType,
groupSortingType, domainSortingMethod, caseDb, centralRepoDb);
groupSortingType, domainSortingMethod, caseDb, centralRepoDb, context);
return cache.get(searchKey);
} catch (ExecutionException ex) {
throw new DiscoveryException("Error fetching results from cache", ex.getCause());

View File

@@ -73,7 +73,7 @@ class DomainSearchCacheLoader extends CacheLoader<SearchKey, Map<GroupKey, List<
throw new InterruptedException();
}
attr.addAttributeToResults(domainResults,
key.getSleuthkitCase(), key.getCentralRepository());
key.getSleuthkitCase(), key.getCentralRepository(), key.getContext());
}
// Apply secondary in memory filters
for (AbstractFilter filter : key.getFilters()) {
@@ -81,7 +81,7 @@ class DomainSearchCacheLoader extends CacheLoader<SearchKey, Map<GroupKey, List<
throw new InterruptedException();
}
if (filter.useAlternateFilter()) {
domainResults = filter.applyAlternateFilter(domainResults, key.getSleuthkitCase(), key.getCentralRepository());
domainResults = filter.applyAlternateFilter(domainResults, key.getSleuthkitCase(), key.getCentralRepository(), key.getContext());
}
}
// Sort the ResultDomains by the requested criteria.

View File

@@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2019-2020 Basis Technology Corp.
* Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -62,17 +62,21 @@ public class FileSearch {
* @param caseDb The case database
* @param centralRepoDb The central repository database. Can be null if
* not needed.
* @param context The SearchContext the search is being performed
* from.
*
* @return The raw search results
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/
static SearchResults runFileSearchDebug(String userName,
List<AbstractFilter> filters,
AttributeType groupAttributeType,
Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod fileSortingMethod,
SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException {
SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Make a list of attributes that we want to add values for. This ensures the
// ResultFile objects will have all needed fields set when it's time to group
// and sort them. For example, if we're grouping by central repo frequency, we need
@@ -82,10 +86,10 @@ public class FileSearch {
attributesNeededForGroupingOrSorting.addAll(fileSortingMethod.getRequiredAttributes());
// Run the queries for each filter
List<Result> results = SearchFiltering.runQueries(filters, caseDb, centralRepoDb);
List<Result> results = SearchFiltering.runQueries(filters, caseDb, centralRepoDb, context);
// Add the data to resultFiles for any attributes needed for sorting and grouping
addAttributes(attributesNeededForGroupingOrSorting, results, caseDb, centralRepoDb);
addAttributes(attributesNeededForGroupingOrSorting, results, caseDb, centralRepoDb, context);
// Collect everything in the search results
SearchResults searchResults = new SearchResults(groupSortingType, groupAttributeType, fileSortingMethod);
@@ -114,21 +118,28 @@ public class FileSearch {
* @param caseDb The case database
* @param centralRepoDb The central repository database. Can be null if
* not needed.
* @param context The SearchContext the search is being performed
* from.
*
* @return A LinkedHashMap grouped and sorted according to the parameters
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/
public static Map<GroupKey, Integer> getGroupSizes(String userName,
List<AbstractFilter> filters,
AttributeType groupAttributeType,
Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod fileSortingMethod,
SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException {
SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
Map<GroupKey, List<Result>> searchResults = runFileSearch(userName, filters,
groupAttributeType, groupSortingType, fileSortingMethod, caseDb, centralRepoDb);
groupAttributeType, groupSortingType, fileSortingMethod, caseDb, centralRepoDb, context);
LinkedHashMap<GroupKey, Integer> groupSizes = new LinkedHashMap<>();
for (GroupKey groupKey : searchResults.keySet()) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled before group sizes were finished being calculated");
}
groupSizes.put(groupKey, searchResults.get(groupKey).size());
}
return groupSizes;
@@ -151,10 +162,14 @@ public class FileSearch {
* @param caseDb The case database
* @param centralRepoDb The central repository database. Can be null if
* not needed.
* @param context The SearchContext the search is being performed
* from.
*
* @return A LinkedHashMap grouped and sorted according to the parameters
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/
public static List<Result> getFilesInGroup(String userName,
List<AbstractFilter> filters,
@@ -164,7 +179,7 @@ public class FileSearch {
GroupKey groupKey,
int startingEntry,
int numberOfEntries,
SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException {
SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
//the group should be in the cache at this point
List<Result> filesInGroup = null;
SearchKey searchKey = new SearchKey(userName, filters, groupAttributeType, groupSortingType, fileSortingMethod);
@@ -178,7 +193,7 @@ public class FileSearch {
List<Result> page = new ArrayList<>();
if (filesInGroup == null) {
logger.log(Level.INFO, "Group {0} was not cached, performing search to cache all groups again", groupKey);
runFileSearch(userName, filters, groupAttributeType, groupSortingType, fileSortingMethod, caseDb, centralRepoDb);
runFileSearch(userName, filters, groupAttributeType, groupSortingType, fileSortingMethod, caseDb, centralRepoDb, context);
synchronized (searchCache) {
resultsMap = searchCache.getIfPresent(searchKey.getKeyString());
}
@@ -218,7 +233,6 @@ public class FileSearch {
TextSummarizer localSummarizer;
synchronized (searchCache) {
localSummarizer = SummaryHelpers.getLocalSummarizer();
}
if (localSummarizer != null) {
try {
@@ -247,17 +261,21 @@ public class FileSearch {
* @param caseDb The case database
* @param centralRepoDb The central repository database. Can be null if
* not needed.
* @param context The SearchContext the search is being performed
* from.
*
* @return A LinkedHashMap grouped and sorted according to the parameters
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/
private static Map<GroupKey, List<Result>> runFileSearch(String userName,
public static Map<GroupKey, List<Result>> runFileSearch(String userName,
List<AbstractFilter> filters,
AttributeType groupAttributeType,
Group.GroupSortingAlgorithm groupSortingType,
ResultsSorter.SortingMethod fileSortingMethod,
SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException {
SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Make a list of attributes that we want to add values for. This ensures the
// ResultFile objects will have all needed fields set when it's time to group
@@ -268,10 +286,10 @@ public class FileSearch {
attributesNeededForGroupingOrSorting.addAll(fileSortingMethod.getRequiredAttributes());
// Run the queries for each filter
List<Result> results = SearchFiltering.runQueries(filters, caseDb, centralRepoDb);
List<Result> results = SearchFiltering.runQueries(filters, caseDb, centralRepoDb, context);
// Add the data to resultFiles for any attributes needed for sorting and grouping
addAttributes(attributesNeededForGroupingOrSorting, results, caseDb, centralRepoDb);
addAttributes(attributesNeededForGroupingOrSorting, results, caseDb, centralRepoDb, context);
// Collect everything in the search results
SearchResults searchResults = new SearchResults(groupSortingType, groupAttributeType, fileSortingMethod);
@@ -295,13 +313,17 @@ public class FileSearch {
* @param caseDb The case database
* @param centralRepoDb The central repository database. Can be null if not
* needed.
* @param context The SearchContext the search is being performed
* from.
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/
private static void addAttributes(List<AttributeType> attrs, List<Result> results, SleuthkitCase caseDb, CentralRepository centralRepoDb)
throws DiscoveryException {
private static void addAttributes(List<AttributeType> attrs, List<Result> results, SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context)
throws DiscoveryException, SearchCancellationException {
for (AttributeType attr : attrs) {
attr.addAttributeToResults(results, caseDb, centralRepoDb);
attr.addAttributeToResults(results, caseDb, centralRepoDb, context);
}
}

View File

@@ -0,0 +1,40 @@
/*
* Autopsy
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.discovery.search;
import java.util.concurrent.CancellationException;
/**
* Exception thrown when a search has been intentionally cancelled, providing
* information on where the code was when the cancellation took place.
*/
public class SearchCancellationException extends CancellationException {
private static final long serialVersionUID = 1L;
/**
* Construct a new SearchCancellationException with the specified message.
*
* @param message The text to use as the message for the exception.
*/
SearchCancellationException(String message) {
super(message);
}
}
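Since SearchCancellationException extends CancellationException it is unchecked, but the new method signatures still declare it so callers remember to treat cancellation separately from failure. A hedged sketch of a caller; runSearch, publishResults, and logger are hypothetical:
try {
    Map<GroupKey, Integer> sizes = runSearch(); // hypothetical helper driving FileSearch/DomainSearch
    publishResults(sizes); // hypothetical UI callback
} catch (SearchCancellationException ex) {
    // The user cancelled: log quietly and discard partial results.
    logger.log(Level.INFO, "Search cancelled by user", ex);
} catch (DiscoveryException ex) {
    logger.log(Level.SEVERE, "Search failed", ex);
}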

View File

@@ -0,0 +1,33 @@
/*
* Autopsy
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.discovery.search;
/**
* Interface for providing feedback on whether a search has been cancelled.
*
*/
public interface SearchContext {
/**
* Returns true if the search has been cancelled, false otherwise.
*
* @return True if the search has been cancelled, false otherwise.
*/
boolean searchIsCancelled();
}
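SearchContext keeps the search layer ignorant of what is driving it; in the Swing UI the natural implementation simply delegates to a SwingWorker's cancellation flag. A minimal hedged sketch; the adapter class name is illustrative:
package org.sleuthkit.autopsy.discovery.search;

import javax.swing.SwingWorker;

// Illustrative SearchContext backed by a SwingWorker: the search code polls
// searchIsCancelled() and throws SearchCancellationException once it is true.
public class SwingWorkerSearchContext implements SearchContext {

    private final SwingWorker<?, ?> worker;

    public SwingWorkerSearchContext(SwingWorker<?, ?> worker) {
        this.worker = worker;
    }

    @Override
    public boolean searchIsCancelled() {
        return worker.isCancelled(); // true once cancel(...) has been called on the worker
    }
}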

View File

@@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2019-2020 Basis Technology Corp.
* Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -60,10 +60,16 @@ public class SearchFiltering {
* @param caseDb The case database.
* @param centralRepoDb The central repo. Can be null as long as no filters
* need it.
* @param context The SearchContext the search is being performed
* from.
*
* @return List of Results from the search performed.
*
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/
static List<Result> runQueries(List<AbstractFilter> filters, SleuthkitCase caseDb, CentralRepository centralRepoDb) throws DiscoveryException {
static List<Result> runQueries(List<AbstractFilter> filters, SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
if (caseDb == null) {
throw new DiscoveryException("Case DB parameter is null"); // NON-NLS
}
@@ -82,8 +88,11 @@ public class SearchFiltering {
// The file search filter is required, so this should never be empty.
throw new DiscoveryException("Selected filters do not include a case database query");
}
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled before the result list could be retrieved.");
}
try {
return getResultList(filters, combinedQuery, caseDb, centralRepoDb);
return getResultList(filters, combinedQuery, caseDb, centralRepoDb, context);
} catch (TskCoreException ex) {
throw new DiscoveryException("Error querying case database", ex); // NON-NLS
}
@@ -97,17 +106,23 @@ public class SearchFiltering {
* @param caseDb The case database.
* @param centralRepoDb The central repo. Can be null as long as no filters
* need it.
* @param context The SearchContext the search is being performed
* from.
*
* @return An ArrayList of Results returned by the query.
*
* @throws TskCoreException
* @throws DiscoveryException
* @throws SearchCancellationException - Thrown when the user has cancelled
* the search.
*/
private static List<Result> getResultList(List<AbstractFilter> filters, String combinedQuery, SleuthkitCase caseDb, CentralRepository centralRepoDb) throws TskCoreException, DiscoveryException {
private static List<Result> getResultList(List<AbstractFilter> filters, String combinedQuery, SleuthkitCase caseDb, CentralRepository centralRepoDb, SearchContext context) throws TskCoreException, DiscoveryException, SearchCancellationException {
// Get all matching abstract files
List<Result> resultList = new ArrayList<>();
List<AbstractFile> sqlResults = caseDb.findAllFilesWhere(combinedQuery);
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while the case database query was being performed.");
}
// If there are no results, return now
if (sqlResults.isEmpty()) {
return resultList;
@@ -120,8 +135,11 @@ public class SearchFiltering {
// Now run any non-SQL filters.
for (AbstractFilter filter : filters) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while alternate filters were being applied.");
}
if (filter.useAlternateFilter()) {
resultList = filter.applyAlternateFilter(resultList, caseDb, centralRepoDb);
resultList = filter.applyAlternateFilter(resultList, caseDb, centralRepoDb, context);
}
// There are no matches for the filters run so far, so return
if (resultList.isEmpty()) {
@@ -227,7 +245,7 @@ public class SearchFiltering {
public Collection<ARTIFACT_TYPE> getTypes() {
return Collections.unmodifiableCollection(types);
}
private StringJoiner joinStandardArtifactTypes() {
StringJoiner joiner = new StringJoiner(",");
for (ARTIFACT_TYPE type : types) {
@@ -241,9 +259,10 @@ public class SearchFiltering {
StringJoiner joiner = joinStandardArtifactTypes();
return "artifact_type_id IN (" + joiner + ")";
}
/**
* Used by backend domain search code to query for additional artifact types.
* Used by backend domain search code to query for additional artifact
* types.
*/
String getWhereClause(List<ARTIFACT_TYPE> nonVisibleArtifactTypesToInclude) {
StringJoiner joiner = joinStandardArtifactTypes();
@@ -674,14 +693,17 @@ public class SearchFiltering {
@Override
public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException {
CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
// Set the frequency for each file
DiscoveryAttributes.FrequencyAttribute freqAttr = new DiscoveryAttributes.FrequencyAttribute();
freqAttr.addAttributeToResults(currentResults, caseDb, centralRepoDb);
freqAttr.addAttributeToResults(currentResults, caseDb, centralRepoDb, context);
// If the frequency matches the filter, add the file to the results
List<Result> frequencyResults = new ArrayList<>();
for (Result file : currentResults) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Frequency alternate filter was being applied.");
}
if (frequencies.contains(file.getFrequency())) {
frequencyResults.add(file);
}
@@ -705,7 +727,7 @@ public class SearchFiltering {
return Bundle.SearchFiltering_FrequencyFilter_desc(desc);
}
}
/**
* A filter for domains with known account types.
*/
@@ -715,17 +737,20 @@ public class SearchFiltering {
public String getWhereClause() {
throw new UnsupportedOperationException("Not supported, this is an alternative filter.");
}
@Override
public boolean useAlternateFilter() {
return true;
}
@Override
public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException {
CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
List<Result> filteredResults = new ArrayList<>();
for (Result result : currentResults) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Known Account Type alternate filter was being applied.");
}
if (result instanceof ResultDomain) {
ResultDomain domain = (ResultDomain) result;
if (domain.hasKnownAccountType()) {
@ -745,9 +770,9 @@ public class SearchFiltering {
public String getDesc() {
return Bundle.SearchFiltering_KnownAccountTypeFilter_desc();
}
}
/**
* A filter for previously notable content in the central repository.
*/
@ -757,19 +782,22 @@ public class SearchFiltering {
public String getWhereClause() {
throw new UnsupportedOperationException("Not supported, this is an alternative filter.");
}
@Override
public boolean useAlternateFilter() {
return true;
}
@Override
public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException {
CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
DiscoveryAttributes.PreviouslyNotableAttribute previouslyNotableAttr = new DiscoveryAttributes.PreviouslyNotableAttribute();
previouslyNotableAttr.addAttributeToResults(currentResults, caseDb, centralRepoDb);
previouslyNotableAttr.addAttributeToResults(currentResults, caseDb, centralRepoDb, context);
List<Result> filteredResults = new ArrayList<>();
for (Result file : currentResults) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Previously Notable alternate filter was being applied.");
}
if (file.getPreviouslyNotableInCR() == SearchData.PreviouslyNotable.PREVIOUSLY_NOTABLE) {
filteredResults.add(file);
}
@ -784,7 +812,7 @@ public class SearchFiltering {
public String getDesc() {
return Bundle.SearchFiltering_PreviouslyNotableFilter_desc();
}
}
/**
@ -1068,7 +1096,7 @@ public class SearchFiltering {
@Override
public List<Result> applyAlternateFilter(List<Result> currentResults, SleuthkitCase caseDb,
CentralRepository centralRepoDb) throws DiscoveryException {
CentralRepository centralRepoDb, SearchContext context) throws DiscoveryException, SearchCancellationException {
if (centralRepoDb == null) {
throw new DiscoveryException("Can not run Previously Notable filter with null Central Repository DB"); // NON-NLS
@ -1087,6 +1115,9 @@ public class SearchFiltering {
CorrelationAttributeInstance.Type type = CorrelationAttributeInstance.getDefaultCorrelationTypes().get(CorrelationAttributeInstance.FILES_TYPE_ID);
for (Result result : currentResults) {
if (context.searchIsCancelled()) {
throw new SearchCancellationException("The search was cancelled while Notable alternate filter was being applied.");
}
ResultFile file = (ResultFile) result;
if (result.getType() == SearchData.Type.DOMAIN) {
break;

View File

@ -1,7 +1,7 @@
/*
* Autopsy
*
* Copyright 2020 Basis Technology Corp.
* Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -31,7 +31,6 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import javax.swing.SwingUtilities;
import org.apache.commons.lang.StringUtils;
import org.openide.util.NbBundle.Messages;
import org.openide.windows.WindowManager;
@ -574,7 +573,7 @@ final class DiscoveryDialog extends javax.swing.JDialog {
}
private void searchButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_searchButtonActionPerformed
// Get the selected filters
setVisible(false); //setVisible is used here instead of dispose in case the dispose code changes
final DiscoveryTopComponent tc = DiscoveryTopComponent.getTopComponent();
if (tc == null) {
setValid("No Top Component Found");
@ -584,6 +583,7 @@ final class DiscoveryDialog extends javax.swing.JDialog {
tc.open();
}
tc.resetTopComponent();
// Get the selected filters
List<AbstractFilter> filters;
if (videosButton.isSelected()) {
filters = videoFilterPanel.getFilters();
@ -617,7 +617,6 @@ final class DiscoveryDialog extends javax.swing.JDialog {
}
searchWorker = new SearchWorker(centralRepoDb, type, filters, groupingAttr, groupSortAlgorithm, fileSort);
searchWorker.execute();
dispose();
tc.toFront();
tc.requestActive();
}//GEN-LAST:event_searchButtonActionPerformed
@ -651,6 +650,7 @@ final class DiscoveryDialog extends javax.swing.JDialog {
void cancelSearch() {
if (searchWorker != null) {
searchWorker.cancel(true);
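// Clear the reference so a later call does not attempt to cancel a worker
// that has already completed or been cancelled.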
searchWorker = null;
}
}
@ -750,7 +750,6 @@ final class DiscoveryDialog extends javax.swing.JDialog {
|| eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT.getTypeID()) {
shouldUpdate = shouldUpdateFilters(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(), eventData, interestingItems);
}
}
} catch (NoCurrentCaseException notUsed) {
// Case is closed, do nothing.

View File

@ -1,7 +1,7 @@
/*
* Autopsy
*
* Copyright 2019-2020 Basis Technology Corp.
* Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -260,7 +260,6 @@ public final class DiscoveryTopComponent extends TopComponent {
private void newSearchButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_newSearchButtonActionPerformed
close();
final DiscoveryDialog discDialog = DiscoveryDialog.getDiscoveryDialogInstance();
discDialog.cancelSearch();
discDialog.setVisible(true);
discDialog.validateDialog();
}//GEN-LAST:event_newSearchButtonActionPerformed

View File

@ -1,7 +1,7 @@
/*
* Autopsy
*
* Copyright 2019 Basis Technology Corp.
* Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -36,6 +36,8 @@ import org.sleuthkit.autopsy.discovery.search.DiscoveryException;
import org.sleuthkit.autopsy.discovery.search.DomainSearch;
import org.sleuthkit.autopsy.discovery.search.ResultsSorter;
import org.sleuthkit.autopsy.discovery.search.Result;
import org.sleuthkit.autopsy.discovery.search.SearchCancellationException;
import org.sleuthkit.autopsy.discovery.search.SearchContext;
/**
* SwingWorker to retrieve the contents of a page.
@ -87,7 +89,7 @@ final class PageWorker extends SwingWorker<Void, Void> {
@Override
protected Void doInBackground() throws Exception {
SearchContext context = new SwingWorkerSearchContext(this);
try {
// Run the search
if (resultType == SearchData.Type.DOMAIN) {
@ -96,17 +98,22 @@ final class PageWorker extends SwingWorker<Void, Void> {
groupingAttribute,
groupSort,
fileSortMethod, groupKey, startingEntry, pageSize,
Case.getCurrentCase().getSleuthkitCase(), centralRepo));
Case.getCurrentCase().getSleuthkitCase(), centralRepo, context));
} else {
results.addAll(FileSearch.getFilesInGroup(System.getProperty(USER_NAME_PROPERTY), searchfilters,
groupingAttribute,
groupSort,
fileSortMethod, groupKey, startingEntry, pageSize,
Case.getCurrentCase().getSleuthkitCase(), centralRepo));
Case.getCurrentCase().getSleuthkitCase(), centralRepo, context));
}
} catch (DiscoveryException ex) {
logger.log(Level.SEVERE, "Error running file search test", ex);
cancel(true);
} catch (SearchCancellationException ex) {
//The user does not explicitly have a way to cancel the loading of a page,
//but they could have cancelled the search during the loading of the first page,
//so this may or may not be an issue depending on when it occurred.
logger.log(Level.WARNING, "Search was cancelled while retrieving data for results page with starting entry: " + startingEntry, ex);
}
return null;
}

View File

@ -1,7 +1,7 @@
/*
* Autopsy
*
* Copyright 2019 Basis Technology Corp.
* Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -35,6 +35,8 @@ import org.sleuthkit.autopsy.discovery.search.FileSearch;
import org.sleuthkit.autopsy.discovery.search.DiscoveryException;
import org.sleuthkit.autopsy.discovery.search.DomainSearch;
import org.sleuthkit.autopsy.discovery.search.ResultsSorter;
import org.sleuthkit.autopsy.discovery.search.SearchCancellationException;
import org.sleuthkit.autopsy.discovery.search.SearchContext;
import org.sleuthkit.autopsy.discovery.search.SearchData;
/**
@ -75,23 +77,28 @@ final class SearchWorker extends SwingWorker<Void, Void> {
protected Void doInBackground() throws Exception {
try {
// Run the search
SearchContext context = new SwingWorkerSearchContext(this);
if (searchType == SearchData.Type.DOMAIN) {
DomainSearch domainSearch = new DomainSearch();
results.putAll(domainSearch.getGroupSizes(System.getProperty(USER_NAME_PROPERTY), filters,
groupingAttr,
groupSortAlgorithm,
fileSort,
Case.getCurrentCase().getSleuthkitCase(), centralRepoDb));
Case.getCurrentCase().getSleuthkitCase(), centralRepoDb, context));
} else {
results.putAll(FileSearch.getGroupSizes(System.getProperty(USER_NAME_PROPERTY), filters,
groupingAttr,
groupSortAlgorithm,
fileSort,
Case.getCurrentCase().getSleuthkitCase(), centralRepoDb));
Case.getCurrentCase().getSleuthkitCase(), centralRepoDb, context));
}
} catch (DiscoveryException ex) {
logger.log(Level.SEVERE, "Error running file search test", ex);
logger.log(Level.SEVERE, "Error running file search test.", ex);
cancel(true);
} catch (SearchCancellationException ex) {
//Search cancellation exceptions indicate that the user chose to cancel this search,
//so this is not a problem, but it may be useful to know what was in progress when it was cancelled.
logger.log(Level.INFO, "Discovery search was cancelled.", ex);
}
return null;
}

View File

@ -0,0 +1,45 @@
/*
* Autopsy
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.discovery.ui;
import javax.swing.SwingWorker;
import org.sleuthkit.autopsy.discovery.search.SearchContext;
/**
* Implementation of SearchContext for searches being performed in the
* background thread of a SwingWorker.
*/
class SwingWorkerSearchContext implements SearchContext {
private final SwingWorker<Void, Void> searchWorker;
/**
* Construct a new SwingWorkerSearchContext.
*
* @param worker The SwingWorker the search is being performed in.
*/
SwingWorkerSearchContext(SwingWorker<Void, Void> worker) {
searchWorker = worker;
}
@Override
public boolean searchIsCancelled() {
return searchWorker.isCancelled();
}
}
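// Usage sketch (illustrative, not part of this commit): a worker constructs a
// context for itself and passes it down to the search code, which polls it
// between units of work and throws SearchCancellationException to stop promptly,
// e.g. inside doInBackground():
//
//     SearchContext context = new SwingWorkerSearchContext(this);
//     for (Result result : results) {
//         if (context.searchIsCancelled()) {
//             throw new SearchCancellationException("The search was cancelled.");
//         }
//         // ... process the result ...
//     }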

View File

@ -48,6 +48,15 @@ class DataSourceFilter extends AbstractFileSearchFilter<DataSourcePanel> {
return this.getComponent().isSelected();
}
/**
* Set the data source filter to select the specified data source initially.
*
* @param dataSourceId - The data source to select.
*/
void setSelectedDataSource(long dataSourceId) {
this.getComponent().setDataSourceSelected(dataSourceId);
}
/**
* Reset the data source filter to be up to date with the current case.
*/

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2018 Basis Technology Corp.
* Copyright 2018-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -152,6 +152,19 @@ public class DataSourcePanel extends javax.swing.JPanel {
this.dataSourceNoteLabel.setEnabled(enabled);
}
/**
* Set the data source initially selected in this filter.
*
* @param dataSourceId - The object ID of the data source which will be
* selected.
*/
void setDataSourceSelected(long dataSourceId) {
this.dataSourceCheckBox.setSelected(true);
setComponentsEnabled();
String dataSourceName = dataSourceMap.get(dataSourceId);
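// Assumes dataSourceMap contains dataSourceId; a null value here would clear
// the current list selection instead of selecting a data source.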
dataSourceList.setSelectedValue(dataSourceName, true);
}
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always

View File

@ -32,6 +32,7 @@ final class FileSearchAction extends CallableSystemAction implements FileSearchP
private static final long serialVersionUID = 1L;
private static FileSearchAction instance = null;
private static FileSearchDialog searchDialog;
private static Long selectedDataSourceId;
FileSearchAction() {
super();
@ -39,7 +40,7 @@ final class FileSearchAction extends CallableSystemAction implements FileSearchP
Case.addEventTypeSubscriber(EnumSet.of(Case.Events.CURRENT_CASE), (PropertyChangeEvent evt) -> {
if (evt.getPropertyName().equals(Case.Events.CURRENT_CASE.toString())) {
setEnabled(evt.getNewValue() != null);
if (searchDialog != null && evt.getNewValue() != null){
if (searchDialog != null && evt.getNewValue() != null) {
searchDialog.resetCaseDependentFilters();
}
}
@ -57,7 +58,9 @@ final class FileSearchAction extends CallableSystemAction implements FileSearchP
public void actionPerformed(ActionEvent e) {
if (searchDialog == null) {
searchDialog = new FileSearchDialog();
}
}
//Clear the requested data source so the dialog keeps whatever data source was previously selected
selectedDataSourceId = null;
searchDialog.setVisible(true);
}
@ -66,6 +69,8 @@ final class FileSearchAction extends CallableSystemAction implements FileSearchP
if (searchDialog == null) {
searchDialog = new FileSearchDialog();
}
//Set the data source filter to the requested data source, if one was specified
if (selectedDataSourceId != null) {
    searchDialog.setSelectedDataSourceFilter(selectedDataSourceId);
}
searchDialog.setVisible(true);
}
@ -85,7 +90,15 @@ final class FileSearchAction extends CallableSystemAction implements FileSearchP
}
@Override
public void showDialog() {
public void showDialog(Long dataSourceId) {
selectedDataSourceId = dataSourceId;
performAction();
}
@Override
@Deprecated
public void showDialog() {
showDialog(null);
}
}

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2018 Basis Technology Corp.
* Copyright 2011-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -29,6 +29,8 @@ import org.openide.windows.WindowManager;
@SuppressWarnings("PMD.SingularField") // UI widgets cause lots of false positives
final class FileSearchDialog extends javax.swing.JDialog {
private static final long serialVersionUID = 1L;
/**
* Creates new form FileSearchDialog
*/
@ -48,6 +50,15 @@ final class FileSearchDialog extends javax.swing.JDialog {
});
}
/**
* Set the data source filter to select the specified data source initially.
*
* @param dataSourceId - The data source to select.
*/
void setSelectedDataSourceFilter(long dataSourceId) {
fileSearchPanel1.setDataSourceFilter(dataSourceId);
}
/**
* Reset the filters which are populated with options based on the contents
* of the current case.

View File

@ -57,13 +57,13 @@ import org.sleuthkit.datamodel.TskCoreException;
*/
@SuppressWarnings("PMD.SingularField") // UI widgets cause lots of false positives
class FileSearchPanel extends javax.swing.JPanel {
private static final Logger logger = Logger.getLogger(FileSearchPanel.class.getName());
private static final long serialVersionUID = 1L;
private final List<FileSearchFilter> filters = new ArrayList<>();
private static int resultWindowCount = 0; //keep track of result windows so they get unique names
private static MimeTypeFilter mimeTypeFilter = new MimeTypeFilter();
private static DataSourceFilter dataSourceFilter = new DataSourceFilter();
private static final MimeTypeFilter mimeTypeFilter = new MimeTypeFilter();
private static final DataSourceFilter dataSourceFilter = new DataSourceFilter();
private static final String EMPTY_WHERE_CLAUSE = NbBundle.getMessage(DateSearchFilter.class, "FileSearchPanel.emptyWhereClause.text");
private static SwingWorker<TableFilterNode, Void> searchWorker = null;
@ -106,7 +106,6 @@ class FileSearchPanel extends javax.swing.JPanel {
DateSearchFilter dateFilter = new DateSearchFilter();
KnownStatusSearchFilter knowStatusFilter = new KnownStatusSearchFilter();
HashSearchFilter hashFilter = new HashSearchFilter();
panel2.add(new FilterArea(NbBundle.getMessage(this.getClass(), "FileSearchPanel.filterTitle.name"), nameFilter));
panel3.add(new FilterArea(NbBundle.getMessage(this.getClass(), "FileSearchPanel.filterTitle.metadata"), sizeFilter));
@ -149,6 +148,15 @@ class FileSearchPanel extends javax.swing.JPanel {
searchButton.setEnabled(isValidSearch());
}
/**
* Set the data source filter to select the specified data source initially.
*
* @param dataSourceId - The data source to select.
*/
void setDataSourceFilter(long dataSourceId) {
dataSourceFilter.setSelectedDataSource(dataSourceId);
}
/**
* @return true if any of the filters in the panel are enabled (checked)
*/
@ -334,7 +342,7 @@ class FileSearchPanel extends javax.swing.JPanel {
return enabledFilters;
}
/**
* Reset the filters which are populated with options based on the contents
* of the current case.

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2018 Basis Technology Corp.
* Copyright 2011-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");

View File

@ -10,7 +10,6 @@ OpenIDE-Module-Short-Description=Embedded File Extraction Ingest Module
EmbeddedFileExtractorIngestModule.SevenZipContentReadStream.seek.exception.invalidOrigin=Invalid seek origin: {0}
EmbeddedFileExtractorIngestModule.SevenZipContentReadStream.read.exception.errReadStream=Error reading content stream.
EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFileLevel=Content-only Encryption (Archive File)
EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFull=Full Encryption (Archive File)
EmbeddedFileExtractorIngestModule.ArchiveExtractor.init.errInitModule.details=Error initializing output dir: {0}: {1}
EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnMsg=Possible ZIP bomb detected in archive: {0}, item: {1}
EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnDetails=Compression ratio is {0}, skipping items in {1}.

View File

@ -23,7 +23,6 @@ OpenIDE-Module-Short-Description=Embedded File Extraction Ingest Module
EmbeddedFileExtractorIngestModule.SevenZipContentReadStream.seek.exception.invalidOrigin=Invalid seek origin: {0}
EmbeddedFileExtractorIngestModule.SevenZipContentReadStream.read.exception.errReadStream=Error reading content stream.
EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFileLevel=Content-only Encryption (Archive File)
EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFull=Full Encryption (Archive File)
EmbeddedFileExtractorIngestModule.ArchiveExtractor.init.errInitModule.details=Error initializing output dir: {0}: {1}
EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnMsg=Possible ZIP bomb detected in archive: {0}, item: {1}
EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnDetails=Compression ratio is {0}, skipping items in {1}.

View File

@ -58,6 +58,7 @@ import org.sleuthkit.autopsy.casemodule.services.FileManager;
import org.sleuthkit.autopsy.coreutils.FileUtil;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.modules.encryptiondetection.EncryptionDetectionModuleFactory;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestMonitor;
@ -95,8 +96,7 @@ class SevenZipExtractor {
//encryption type strings
private static final String ENCRYPTION_FILE_LEVEL = NbBundle.getMessage(EmbeddedFileExtractorIngestModule.class,
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFileLevel");
private static final String ENCRYPTION_FULL = NbBundle.getMessage(EmbeddedFileExtractorIngestModule.class,
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFull");
private static final String ENCRYPTION_FULL = EncryptionDetectionModuleFactory.PASSWORD_PROTECT_MESSAGE;
//zip bomb detection
private static final int MAX_DEPTH = 4;

View File

@ -83,7 +83,7 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
private Blackboard blackboard;
private IngestJobContext context;
private double calculatedEntropy;
private final double minimumEntropy;
private final int minimumFileSize;
private final boolean fileSizeMultipleEnforced;
@ -119,7 +119,6 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
}
@Messages({
"EncryptionDetectionFileIngestModule.artifactComment.password=Password protection detected.",
"EncryptionDetectionFileIngestModule.artifactComment.suspected=Suspected encryption due to high entropy (%f)."
})
@Override
@ -160,7 +159,7 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
String.format(Bundle.EncryptionDetectionFileIngestModule_artifactComment_suspected(), calculatedEntropy));
} else if (isFilePasswordProtected(file)) {
return flagFile(file, BlackboardArtifact.Type.TSK_ENCRYPTION_DETECTED, Score.SCORE_NOTABLE,
Bundle.EncryptionDetectionFileIngestModule_artifactComment_password());
EncryptionDetectionModuleFactory.PASSWORD_PROTECT_MESSAGE);
}
}
} catch (ReadContentInputStreamException | SAXException | TikaException | UnsupportedCodecException ex) {

View File

@ -36,15 +36,19 @@ import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettingsPanel;
@ServiceProvider(service = IngestModuleFactory.class)
@Messages({
"EncryptionDetectionFileIngestModule.moduleName.text=Encryption Detection",
"EncryptionDetectionFileIngestModule.getDesc.text=Looks for files with the specified minimum entropy."
"EncryptionDetectionFileIngestModule.getDesc.text=Looks for files with the specified minimum entropy.",
"EncryptionDetectionFileIngestModule.artifactComment.password=Password protection detected.",
})
public class EncryptionDetectionModuleFactory implements IngestModuleFactory {
public static final String PASSWORD_PROTECT_MESSAGE = Bundle.EncryptionDetectionFileIngestModule_artifactComment_password();
@Override
public String getModuleDisplayName() {
return getModuleName();
}
/**
* Get the name of the module.
*

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2020 Basis Technology Corp.
* Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -52,7 +52,7 @@ public class DomainSearchCacheLoaderTest {
new DiscoveryAttributes.DataSourceAttribute(),
Group.GroupSortingAlgorithm.BY_GROUP_NAME,
ResultsSorter.SortingMethod.BY_DOMAIN_NAME,
caseDb, null);
caseDb, null, new TestSearchContextImpl(false));
DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class);
when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains);
@ -88,7 +88,7 @@ public class DomainSearchCacheLoaderTest {
new DiscoveryAttributes.NoGroupingAttribute(),
Group.GroupSortingAlgorithm.BY_GROUP_NAME,
ResultsSorter.SortingMethod.BY_DOMAIN_NAME,
caseDb, null);
caseDb, null, new TestSearchContextImpl(false));
DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class);
when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains);
@ -121,7 +121,7 @@ public class DomainSearchCacheLoaderTest {
new DiscoveryAttributes.NoGroupingAttribute(),
Group.GroupSortingAlgorithm.BY_GROUP_NAME,
ResultsSorter.SortingMethod.BY_DATA_SOURCE,
caseDb, null);
caseDb, null, new TestSearchContextImpl(false));
DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class);
when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains);
@ -155,7 +155,7 @@ public class DomainSearchCacheLoaderTest {
new DiscoveryAttributes.DataSourceAttribute(),
Group.GroupSortingAlgorithm.BY_GROUP_SIZE,
ResultsSorter.SortingMethod.BY_DOMAIN_NAME,
caseDb, null);
caseDb, null, new TestSearchContextImpl(false));
DomainSearchCacheLoader loader = mock(DomainSearchCacheLoader.class);
when(loader.getResultDomainsFromDatabase(key)).thenReturn(domains);
@ -173,4 +173,5 @@ public class DomainSearchCacheLoaderTest {
}
}
}
}

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2020 Basis Technology Corp.
* Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -24,7 +24,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import static org.mockito.Mockito.*;
import static org.junit.Assert.*;
import org.sleuthkit.autopsy.discovery.search.DiscoveryKeyUtils.GroupKey;
@ -46,11 +45,11 @@ public class DomainSearchTest {
);
}
};
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
Map<GroupKey, Integer> sizes = domainSearch.getGroupSizes(null,
new ArrayList<>(), null, null, null, null, null);
Map<GroupKey, Integer> sizes = domainSearch.getGroupSizes(null,
new ArrayList<>(), null, null, null, null, null, new TestSearchContextImpl(false));
assertEquals(4, sizes.get(groupOne).longValue());
}
@ -81,11 +80,11 @@ public class DomainSearchTest {
}
};
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
Map<GroupKey, Integer> sizes = domainSearch.getGroupSizes(null,
new ArrayList<>(), null, null, null, null, null);
Map<GroupKey, Integer> sizes = domainSearch.getGroupSizes(null,
new ArrayList<>(), null, null, null, null, null, new TestSearchContextImpl(false));
assertEquals(4, sizes.get(groupOne).longValue());
assertEquals(3, sizes.get(groupTwo).longValue());
assertEquals(1, sizes.get(groupThree).longValue());
@ -95,11 +94,11 @@ public class DomainSearchTest {
public void groupSizes_EmptyGroup_ShouldBeSizeZero() throws DiscoveryException {
DomainSearchCache cache = mock(DomainSearchCache.class);
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(new HashMap<>());
when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(new HashMap<>());
DomainSearch domainSearch = new DomainSearch(cache, null, null);
Map<GroupKey, Integer> sizes = domainSearch.getGroupSizes(null,
new ArrayList<>(), null, null, null, null, null);
Map<GroupKey, Integer> sizes = domainSearch.getGroupSizes(null,
new ArrayList<>(), null, null, null, null, null, new TestSearchContextImpl(false));
assertEquals(0, sizes.size());
}
@ -120,17 +119,17 @@ public class DomainSearchTest {
}
};
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 3, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 3, null, null, new TestSearchContextImpl(false));
assertEquals(3, firstPage.size());
for (int i = 0; i < firstPage.size(); i++) {
assertEquals(domains.get(i), firstPage.get(i));
}
}
@Test
public void getDomains_SingleGroupOverSizedPage_ShouldContainAllDomains() throws DiscoveryException {
DomainSearchCache cache = mock(DomainSearchCache.class);
@ -148,17 +147,17 @@ public class DomainSearchTest {
}
};
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 100, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 100, null, null, new TestSearchContextImpl(false));
assertEquals(4, firstPage.size());
for (int i = 0; i < firstPage.size(); i++) {
assertEquals(domains.get(i), firstPage.get(i));
}
}
@Test
public void getDomains_SingleGroupHalfPage_ShouldContainHalfDomains() throws DiscoveryException {
DomainSearchCache cache = mock(DomainSearchCache.class);
@ -176,18 +175,18 @@ public class DomainSearchTest {
}
};
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 2, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 2, null, null, new TestSearchContextImpl(false));
assertEquals(2, firstPage.size());
for (int i = 0; i < firstPage.size(); i++) {
assertEquals(domains.get(i), firstPage.get(i));
}
}
@Test
@Test
public void getDomains_SingleGroupLastPageLastDomain_ShouldContainLastDomain() throws DiscoveryException {
DomainSearchCache cache = mock(DomainSearchCache.class);
@ -204,15 +203,15 @@ public class DomainSearchTest {
}
};
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 3, 1, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 3, 1, null, null, new TestSearchContextImpl(false));
assertEquals(1, firstPage.size());
assertEquals(domains.get(domains.size() - 1), firstPage.get(0));
}
@Test
public void getDomains_SingleGroupOversizedOffset_ShouldContainNoDomains() throws DiscoveryException {
DomainSearchCache cache = mock(DomainSearchCache.class);
@ -230,14 +229,14 @@ public class DomainSearchTest {
}
};
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 20, 5, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 20, 5, null, null, new TestSearchContextImpl(false));
assertEquals(0, firstPage.size());
}
@Test
public void getDomains_SingleGroupZeroSizedPage_ShouldContainNoDomains() throws DiscoveryException {
DomainSearchCache cache = mock(DomainSearchCache.class);
@ -255,14 +254,14 @@ public class DomainSearchTest {
}
};
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 0, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 0, null, null, new TestSearchContextImpl(false));
assertEquals(0, firstPage.size());
}
@Test
public void getDomains_MultipleGroupsFullPage_ShouldContainAllDomainsInGroup() throws DiscoveryException {
DomainSearchCache cache = mock(DomainSearchCache.class);
@ -290,14 +289,14 @@ public class DomainSearchTest {
}
};
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 3, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, 0, 3, null, null, new TestSearchContextImpl(false));
assertEquals(3, firstPage.size());
}
@Test
public void getDomains_MultipleGroupsHalfPage_ShouldContainHalfDomainsInGroup() throws DiscoveryException {
DomainSearchCache cache = mock(DomainSearchCache.class);
@ -325,17 +324,17 @@ public class DomainSearchTest {
}
};
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupTwo, 1, 2, null, null);
List<Result> firstPage = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupTwo, 1, 2, null, null, new TestSearchContextImpl(false));
assertEquals(2, firstPage.size());
for (int i = 0; i < firstPage.size(); i++) {
assertEquals(dummyData.get(groupTwo).get(i + 1), firstPage.get(i));
}
}
@Test
public void getDomains_SingleGroupSimulatedPaging_ShouldPageThroughAllDomains() throws DiscoveryException {
DomainSearchCache cache = mock(DomainSearchCache.class);
@ -357,20 +356,20 @@ public class DomainSearchTest {
}
};
when(cache.get(null, new ArrayList<>(), null, null, null, null, null)).thenReturn(dummyData);
when(cache.get(isNull(), eq(new ArrayList<>()), isNull(), isNull(), isNull(), isNull(), isNull(), any(SearchContext.class))).thenReturn(dummyData);
DomainSearch domainSearch = new DomainSearch(cache, null, null);
int start = 0;
int size = 2;
while (start + size <= domains.size()) {
List<Result> page = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, start, size, null, null);
List<Result> page = domainSearch.getDomainsInGroup(null,
new ArrayList<>(), null, null, null, groupOne, start, size, null, null, new TestSearchContextImpl(false));
assertEquals(2, page.size());
for(int i = 0; i < page.size(); i++) {
for (int i = 0; i < page.size(); i++) {
assertEquals(domains.get(start + i), page.get(i));
}
start += size;
}
}
@ -379,7 +378,7 @@ public class DomainSearchTest {
private final String name;
public DummyKey(String name) {
DummyKey(String name) {
this.name = name;
}

View File

@ -0,0 +1,37 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.discovery.search;
/**
* Implementation of SearchContext for testing to ensure NPEs are not thrown and
* the context indicates the expected cancellation status.
*/
public class TestSearchContextImpl implements SearchContext {
private final boolean isCancelled;
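/**
 * Construct a new TestSearchContextImpl.
 *
 * @param hasBeenCancelled True if this context should report that the
 *                         search has been cancelled.
 */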
public TestSearchContextImpl(boolean hasBeenCancelled) {
isCancelled = hasBeenCancelled;
}
@Override
public boolean searchIsCancelled() {
return isCancelled;
}
}
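// Usage sketch (illustrative): tests pass "new TestSearchContextImpl(false)" where
// production code would supply a SwingWorkerSearchContext; constructing it with
// "true" lets a test verify that cancellation-aware code stops and throws
// SearchCancellationException when searchIsCancelled() reports true.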

View File

@ -30,32 +30,35 @@ import javax.annotation.concurrent.Immutable;
public final class Manifest implements Serializable {
private static final long serialVersionUID = 1L;
private final String filePath;
private final Path filePath;
private final Date dateFileCreated;
private final String caseName;
private final String deviceId;
private final String dataSourcePath;
private final Path dataSourcePath;
private final String dataSourceFileName;
private final Map<String, String> manifestProperties;
public Manifest(Path manifestFilePath, Date dateFileCreated, String caseName, String deviceId, Path dataSourcePath, Map<String, String> manifestProperties) {
this.filePath = manifestFilePath.toString();
this.dateFileCreated = dateFileCreated;
this.filePath = Paths.get(manifestFilePath.toString());
this.dateFileCreated = new Date(dateFileCreated.getTime());
this.caseName = caseName;
this.deviceId = deviceId;
if (null != dataSourcePath) {
this.dataSourcePath = dataSourcePath.toString();
this.dataSourcePath = Paths.get(dataSourcePath.toString());
dataSourceFileName = dataSourcePath.getFileName().toString();
} else {
this.dataSourcePath = "";
this.dataSourcePath = Paths.get("");
dataSourceFileName = "";
}
this.manifestProperties = new HashMap<>(manifestProperties);
}
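// Note: the constructor stores defensive copies (a new Date and a new HashMap)
// because Date and Map are mutable; keeping the caller's instances would let
// external code alter this otherwise immutable class after construction.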
public Path getFilePath() {
return Paths.get(this.filePath);
return this.filePath;
}
public Date getDateFileCreated() {
return new Date(this.dateFileCreated.getTime());
return dateFileCreated;
}
public String getCaseName() {
@ -67,11 +70,11 @@ public final class Manifest implements Serializable {
}
public Path getDataSourcePath() {
return Paths.get(dataSourcePath);
return dataSourcePath;
}
public String getDataSourceFileName() {
return Paths.get(dataSourcePath).getFileName().toString();
return dataSourceFileName;
}
public Map<String, String> getManifestProperties() {

View File

@ -177,14 +177,6 @@ final class ChromeCacheExtractor {
currentCase = Case.getCurrentCaseThrows();
fileManager = currentCase.getServices().getFileManager();
// Create an output folder to save any derived files
absOutputFolderName = RAImageIngestModule.getRAOutputPath(currentCase, moduleName, context.getJobId());
relOutputFolderName = Paths.get(RAImageIngestModule.getRelModuleOutputPath(currentCase, moduleName, context.getJobId())).normalize().toString();
File dir = new File(absOutputFolderName);
if (dir.exists() == false) {
dir.mkdirs();
}
} catch (NoCurrentCaseException ex) {
String msg = "Failed to get current case."; //NON-NLS
throw new IngestModuleException(msg, ex);
@ -279,6 +271,17 @@ final class ChromeCacheExtractor {
// Identify each cache folder by searching for the index files in each
List<AbstractFile> indexFiles = findIndexFiles();
if (indexFiles.size() > 0) {
// Create an output folder to save any derived files
absOutputFolderName = RAImageIngestModule.getRAOutputPath(currentCase, moduleName, context.getJobId());
relOutputFolderName = Paths.get(RAImageIngestModule.getRelModuleOutputPath(currentCase, moduleName, context.getJobId())).normalize().toString();
File dir = new File(absOutputFolderName);
if (dir.exists() == false) {
dir.mkdirs();
}
}
// Process each of the cache folders
for (AbstractFile indexFile: indexFiles) {

View File

@ -76,29 +76,28 @@ With our connection in hand, we can do some queries. In our sample database, we
stmt = dbConn.createStatement()
resultSet = stmt.executeQuery("SELECT * FROM contacts")\endverbatim
For each row, we are going to get the values for the name, e-mail, and phone number and make a TSK_CONTACT artifact. Recall from the first tutorial that posting artifacts to the blackboard allows modules to communicate with each other and also allows you to easily display data to the user. The TSK_CONTACT artifact is for storing contact information.
For each row, we are going to get the values for the name, e-mail, and phone number and make a TSK_CONTACT artifact. Recall from the first tutorial that posting artifacts to the blackboard allows modules to communicate with each other and also allows you to easily display data to the user. The TSK_CONTACT artifact is for storing contact information. The <a href="http://sleuthkit.org/sleuthkit/docs/jni-docs/latest/artifact_catalog_page.html">artifact catalog</a> shows that TSK_CONTACT is a data artifact, so we will be using the newDataArtifact() method to create each one.
The basic approach in our example is to make an artifact of a given type (TSK_CONTACT) and have it be associated with the database it came from. We then make attributes for the name, email, and phone. The following code does this for each row in the database:
\verbatim
while resultSet.next():
# Make an artifact on the blackboard and give it attributes
art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT)
name = resultSet.getString("name")
art.addAttribute(BlackboardAttribute(
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME_PERSON.getTypeID(),
ContactsDbIngestModuleFactory.moduleName, name))
email = resultSet.getString("email")
art.addAttribute(BlackboardAttribute(
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_EMAIL.getTypeID(),
ContactsDbIngestModuleFactory.moduleName, email))
phone = resultSet.getString("phone")
art.addAttribute(BlackboardAttribute(
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER.getTypeID(),
ContactsDbIngestModuleFactory.moduleName, phone))\endverbatim
try:
name = resultSet.getString("name")
email = resultSet.getString("email")
phone = resultSet.getString("phone")
except SQLException as e:
self.log(Level.INFO, "Error getting values from contacts table (" + e.getMessage() + ")")
# Make an artifact on the blackboard, TSK_CONTACT and give it attributes for each of the fields
art = file.newDataArtifact(BlackboardArtifact.Type.TSK_CONTACT, Arrays.asList(
BlackboardAttribute(BlackboardAttribute.Type.TSK_NAME_PERSON,
ContactsDbIngestModuleFactory.moduleName, name),
BlackboardAttribute(BlackboardAttribute.Type.TSK_EMAIL,
ContactsDbIngestModuleFactory.moduleName, email),
BlackboardAttribute(BlackboardAttribute.Type.TSK_PHONE_NUMBER,
ContactsDbIngestModuleFactory.moduleName, phone)
))\endverbatim
That's it. We've just found the databases, queried them, and made artifacts for the user to see. There are some final things though. First, we should fire off an event so that the UI updates and refreshes with the new artifacts. We can fire just one event after each database is parsed (or you could fire one for each artifact - it's up to you).
@ -113,6 +112,8 @@ stmt.close()
dbConn.close()
os.remove(lclDbPath)\endverbatim
The final version of findContactsDb.py can be found on <a href="https://github.com/sleuthkit/autopsy/blob/develop/pythonExamples/Aug2015DataSourceTutorial/FindContactsDb.py">github</a>.
\subsection python_tutorial2_niceties Niceties
Data source-level ingest modules can run for quite some time. Therefore, data source-level ingest modules should do some additional things that file-level ingest modules do not need to do.

View File

@ -75,75 +75,65 @@ The process() method is passed in a reference to an AbstractFile Object. With th
Now that we have found the files, we want to do something with them. In our situation, we just want to alert the user to them. We do this by making an "Interesting Item" blackboard artifact. The <a href="https://sleuthkit.org/sleuthkit/docs/jni-docs/latest/mod_bbpage.html" target="_blank" rel="noopener noreferrer">Blackboard</a> is where ingest modules can communicate with each other and with the Autopsy GUI. The blackboard has a set of artifacts on it and each artifact:</p>
<ul>
<li>Has a type</li>
<li>Has a category</li>
<li>Is associated with a file</li>
<li>Has one or more attributes. Attributes are simply name and value pairs.</li>
</ul>
For our example, we are going to make an artifact of type "TSK_INTERESTING_FILE" whenever we find a big and round file. These are one of the most generic artifact types and are simply a way of alerting the user that a file is interesting for some reason. Once you make the artifact, it will be shown in the UI. The below code makes an artifact for the file and puts it into the set of "Big and Round Files". You can create whatever set names you want. The Autopsy GUI organizes Interesting Files by their set name.
A list of standard artifact types can be found in the <a href="http://sleuthkit.org/sleuthkit/docs/jni-docs/latest/artifact_catalog_page.html">artifact catalog</a>. It is important to note the category of the artifact you want to create, since this affects which method you will use to create it.
For our example, we are going to make an artifact of type "TSK_INTERESTING_FILE", which is an analysis result, whenever we find a big and round file. This is one of the most generic artifact types and is simply a way of alerting the user that a file is interesting for some reason. Once you make the artifact, it will be shown in the UI. The code below makes an artifact for the file and puts it into the set of "Big and Round Files". You can create whatever set names you want. The Autopsy GUI organizes Interesting Files by their set name.
\verbatim
art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(),
FindBigRoundFilesIngestModuleFactory.moduleName, "Big and Round Files")
art.addAttribute(att)\endverbatim
art = file.newAnalysisResult(BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT, Score.SCORE_LIKELY_NOTABLE,
None, "Big and Round Files", None,
Arrays.asList(
BlackboardAttribute(BlackboardAttribute.Type.TSK_SET_NAME,
FindBigRoundFilesIngestModuleFactory.moduleName,
"Big and Round Files"))).getAnalysisResult()\endverbatim
The above code adds the artifact and a single attribute to the blackboard in the embedded database, but it does not notify other modules or the UI. The UI will eventually refresh, but it is faster to fire an event with this:
The above code adds the artifact and a single attribute to the blackboard in the embedded database, but it does not notify other modules or the UI. Calling postArtifact() will let the tree viewer and other parts of the UI know that a refresh may be necessary, and passes the newly created artifact to other modules that may do further processing on it.
\verbatim
IngestServices.getInstance().fireModuleDataEvent(
ModuleDataEvent(FindBigRoundFilesIngestModuleFactory.moduleName,
BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, None))\endverbatim
blackboard.postArtifact(art, FindBigRoundFilesIngestModuleFactory.moduleName)\endverbatim
That's it. Your process() method should look something like this:
\verbatim
def process(self, file):
# Use blackboard class to index blackboard artifacts for keyword search
blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard()
# Skip non-files
if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) or
(file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) or
if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) or
(file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) or
(file.isFile() == False)):
return IngestModule.ProcessResult.OK
# Look for files bigger than 10MB that are a multiple of 4096
if ((file.getSize() &gt; 10485760) and ((file.getSize() % 4096) == 0)):
# Look for files bigger than 10MB that are a multiple of 4096
if ((file.getSize() > 10485760) and ((file.getSize() % 4096) == 0)):
# Make an artifact on the blackboard. TSK_INTERESTING_FILE_HIT is a generic type of
# artifact. Refer to the developer docs for other examples.
art = file.newAnalysisResult(BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT, Score.SCORE_LIKELY_NOTABLE,
None, "Big and Round Files", None,
Arrays.asList(
BlackboardAttribute(BlackboardAttribute.Type.TSK_SET_NAME,
FindBigRoundFilesIngestModuleFactory.moduleName,
"Big and Round Files"))).getAnalysisResult()
art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
try:
# post the artifact for listeners of artifact events
blackboard.postArtifact(art, FindBigRoundFilesIngestModuleFactory.moduleName)
except Blackboard.BlackboardException as e:
self.log(Level.SEVERE, "Error indexing artifact " + art.getDisplayName())
att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(),
FindBigRoundFilesIngestModuleFactory.moduleName, "Big and Round Files")
art.addAttribute(att)
# Fire an event to notify the UI and others that there is a new artifact
IngestServices.getInstance().fireModuleDataEvent(
ModuleDataEvent(FindBigRoundFilesIngestModuleFactory.moduleName,
BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, None))
return IngestModule.ProcessResult.OK\endverbatim
return IngestModule.ProcessResult.OK\endverbatim
Save this file and run the module on some of your data. If you have any big and round files, you should see an entry under the "Interesting Items" node in the tree.
\image html bigAndRoundFiles.png
The full big and round file module along with test data can be found on <a href="https://github.com/sleuthkit/autopsy/tree/develop/pythonExamples/July2015FileTutorial_BigRound">github</a>.
\subsection python_tutorial1_debug Debugging and Development Tips
Whenever you have syntax errors or other errors in your script, you will get some form of dialog from Autopsy when you try to run ingest modules. If that happens, fix the problem and run ingest modules again. You don't need to restart Autopsy each time!

View File

@ -45,7 +45,7 @@ A third approach is to call org.sleuthkit.autopsy.casemodule.Case.getDataSources
\subsubsection python_tutorial3_getting_artifacts Getting Blackboard Artifacts
The blackboard is where modules store their analysis results. If you want to include them in your report, then there are several methods that you could use. If you want all artifacts of a given type, then you can use <a href="https://sleuthkit.org/sleuthkit/docs/jni-docs/latest/classorg_1_1sleuthkit_1_1datamodel_1_1_sleuthkit_case.html#a0b8396fac6c40d8291cc48732dd15d74">SleuthkitCase.getBlackboardArtifacts()</a>. There are many variations of this method that take different arguments. Look at them to find the one that is most convenient for you.
The blackboard is where modules store their analysis results. If you want to include them in your report, then there are several methods that you could use. If you want all artifacts of a given type, then you can use <a href="http://sleuthkit.org/sleuthkit/docs/jni-docs/latest//classorg_1_1sleuthkit_1_1datamodel_1_1_blackboard.html#af7261eb61cd05a4d457910eed599dd54">getDataArtifacts()</a> or <a href="http://sleuthkit.org/sleuthkit/docs/jni-docs/latest//classorg_1_1sleuthkit_1_1datamodel_1_1_blackboard.html#a563cbd08810a1b31ef2ecf0ebf0b7356">Blackboard.getAnalysisResultsByType()</a>. There are variations of these methods that take different arguments. Look at them to find the one that is most convenient for you.
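For example, gathering every interesting file hit for a report could look like the following (shown in Java; the same calls work from Jython). This is a sketch only: error handling is omitted and the artifact type used is just an illustration.
\verbatim
// Returns every TSK_INTERESTING_FILE_HIT analysis result in the case.
Blackboard blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard();
List<AnalysisResult> interestingFileHits = blackboard.getAnalysisResultsByType(
        BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT.getTypeID());\endverbatim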
\subsubsection python_tutorial3_getting_tags Getting Tagged Files or Artifacts