Resolved merge conflicts. Regex bug fix

This commit is contained in:
Eugene Livis 2021-12-06 17:40:26 -05:00
commit c8625041a3
118 changed files with 7448 additions and 3164 deletions

View File

@ -425,9 +425,14 @@ class AddImageWizardAddingProgressPanel extends ShortcutWizardDescriptorPanel {
// TBD: there probably should be an error level for each error
addErrors(err, critErr);
}
//notify the UI of the new content added to the case
final Level level = critErr ? Level.SEVERE : Level.WARNING;
new Thread(() -> {
//log error messages as Severe if there was a critical error otherwise as Warning.
//logging performed off of UI thread
for (String err : errList) {
Logger.getLogger(AddImageWizardAddingProgressVisual.class.getName()).log(level, "DatasourceID: {0} Error Message: {1}", new Object[]{dataSourceId.toString(), err});
}
//notify the UI of the new content added to the case
try {
if (!contents.isEmpty()) {
Case.getCurrentCaseThrows().notifyDataSourceAdded(contents.get(0), dataSourceId);

View File

@ -12,6 +12,7 @@ CentralRepoDbChoice.PostgreSQL.Text=Custom PostgreSQL
CentralRepoDbChoice.PostgreSQL_Multiuser.Text=PostgreSQL using multi-user settings
CentralRepoDbChoice.Sqlite.Text=SQLite
CentralRepoDbManager.connectionErrorMsg.text=Failed to connect to central repository database.
CentralRepositoryService.progressMsg.startingListener=Starting events listener...
CentralRepositoryService.progressMsg.updatingSchema=Checking for schema updates...
CentralRepositoryService.progressMsg.waitingForListeners=Finishing adding data to central repository database....
CentralRepositoryService.serviceName=Central Repository Service

View File

@ -310,18 +310,25 @@ public interface CentralRepository {
List<CorrelationAttributeInstance> getArtifactInstancesByTypeValues(CorrelationAttributeInstance.Type aType, List<String> values) throws CentralRepoException, CorrelationAttributeNormalizationException;
/**
* Retrieves eamArtifact instances from the database that are associated
* with the eamArtifactType and eamArtifactValue of the given eamArtifact.
* Retrieves correlation attribute instances from the central repository
* that match a given attribute type and value.
*
* @param aType The type of the artifact
* @param value The correlation value
* @param type The correlation attribute type.
* @param value The correlation attribute value.
*
* @return List of artifact instances for a given type/value
* @return The matching correlation attribute instances.
*
* @throws CorrelationAttributeNormalizationException
* @throws CentralRepoException
* @throws CorrelationAttributeNormalizationException The exception is thrown if the supplied correlation attribute value cannot be normalized.
* @throws CentralRepoException The exception is thrown if there is an error querying the central repository.
*/
List<CorrelationAttributeInstance> getArtifactInstancesByTypeValue(CorrelationAttributeInstance.Type aType, String value) throws CentralRepoException, CorrelationAttributeNormalizationException;
List<CorrelationAttributeInstance> getArtifactInstancesByTypeValue(CorrelationAttributeInstance.Type type, String value) throws CentralRepoException, CorrelationAttributeNormalizationException;
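For illustration, a minimal sketch of how a caller might use this query method (the SID value is hypothetical, the central repository is assumed to be enabled, and exception handling is reduced to a stub):
try {
    CentralRepository repo = CentralRepository.getInstance();
    // Look up the OS account correlation type defined by the central repository.
    CorrelationAttributeInstance.Type osAcctType = repo.getCorrelationTypeById(CorrelationAttributeInstance.OSACCOUNT_TYPE_ID);
    // Fetch all instances in the central repository that match the given value.
    List<CorrelationAttributeInstance> matches = repo.getArtifactInstancesByTypeValue(osAcctType, "S-1-5-21-1004336348-1177238915-682003330-512");
} catch (CentralRepoException | CorrelationAttributeNormalizationException ex) {
    // The value could not be normalized or the query failed.
}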
/**
* Retrieves eamArtifact instances from the database that are associated

View File

@ -1,7 +1,7 @@
/*
* Central Repository
*
* Copyright 2018-2020 Basis Technology Corp.
* Copyright 2018-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -24,7 +24,6 @@ import org.sleuthkit.autopsy.appservices.AutopsyService;
import org.sleuthkit.autopsy.progress.ProgressIndicator;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.centralrepository.eventlisteners.CaseEventListener;
import org.sleuthkit.autopsy.centralrepository.eventlisteners.IngestEventsListener;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.TskCoreException;
@ -36,8 +35,7 @@ import org.sleuthkit.datamodel.TskCoreException;
public class CentralRepositoryService implements AutopsyService {
private CaseEventListener caseEventListener = new CaseEventListener();
private IngestEventsListener ingestEventListener = new IngestEventsListener();
@Override
@NbBundle.Messages({
"CentralRepositoryService.serviceName=Central Repository Service"
@ -47,7 +45,8 @@ public class CentralRepositoryService implements AutopsyService {
}
@NbBundle.Messages({
"CentralRepositoryService.progressMsg.updatingSchema=Checking for schema updates..."
"CentralRepositoryService.progressMsg.updatingSchema=Checking for schema updates...",
"CentralRepositoryService.progressMsg.startingListener=Starting events listener..."
})
@Override
public void openCaseResources(CaseContext context) throws AutopsyServiceException {
@ -58,21 +57,20 @@ public class CentralRepositoryService implements AutopsyService {
ProgressIndicator progress = context.getProgressIndicator();
progress.progress(Bundle.CentralRepositoryService_progressMsg_updatingSchema());
updateSchema();
if (context.cancelRequested()) {
return;
}
dataUpgradeForVersion1dot2(context.getCase());
if (context.cancelRequested()) {
return;
}
progress.progress(Bundle.CentralRepositoryService_progressMsg_startingListener());
caseEventListener = new CaseEventListener();
caseEventListener.installListeners();
ingestEventListener = new IngestEventsListener();
ingestEventListener.installListeners();
caseEventListener.startUp();
}
@NbBundle.Messages({
"CentralRepositoryService.progressMsg.waitingForListeners=Finishing adding data to central repository database...."
})
@ -80,22 +78,16 @@ public class CentralRepositoryService implements AutopsyService {
public void closeCaseResources(CaseContext context) throws AutopsyServiceException {
ProgressIndicator progress = context.getProgressIndicator();
progress.progress(Bundle.CentralRepositoryService_progressMsg_waitingForListeners());
if (caseEventListener != null) {
caseEventListener.uninstallListeners();
caseEventListener.shutdown();
}
if (ingestEventListener != null) {
ingestEventListener.uninstallListeners();
ingestEventListener.shutdown();
}
}
/**
* Updates the central repository schema to the latest version.
* Updates the central repository database schema to the latest version.
*
* @throws AutopsyServiceException
* @throws AutopsyServiceException The exception is thrown if there is an
* error updating the database schema.
*/
private void updateSchema() throws AutopsyServiceException {
try {
@ -107,10 +99,11 @@ public class CentralRepositoryService implements AutopsyService {
/**
* Adds missing data source object IDs from data sources in this case to the
* corresponding records in the central repository. This is a data update to
* go with the v1.2 schema update.
* corresponding records in the central repository database. This is a data
* update to go with the v1.2 schema update.
*
* @throws AutopsyServiceException
* @throws AutopsyServiceException The exception is thrown if there is an
* error updating the database.
*/
private void dataUpgradeForVersion1dot2(Case currentCase) throws AutopsyServiceException {
try {

View File

@ -40,7 +40,6 @@ import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataArtifact;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.HashUtility;
import org.sleuthkit.datamodel.InvalidAccountIDException;
import org.sleuthkit.datamodel.OsAccount;
@ -114,8 +113,54 @@ public class CorrelationAttributeUtil {
return Collections.emptyList();
}
public static List<CorrelationAttributeInstance> makeCorrAttrsToSave(OsAccountInstance osAccountInstance) {
return makeCorrAttrsForSearch(osAccountInstance);
/**
* Gets the correlation attributes for an OS account instance represented as
* an OS account plus a data source.
*
* @param account The OS account.
* @param dataSource The data source.
*
* @return The correlation attributes.
*/
public static List<CorrelationAttributeInstance> makeCorrAttrsToSave(OsAccount account, Content dataSource) {
List<CorrelationAttributeInstance> correlationAttrs = new ArrayList<>();
if (CentralRepository.isEnabled()) {
Optional<String> accountAddr = account.getAddr();
if (accountAddr.isPresent() && !isSystemOsAccount(accountAddr.get())) {
try {
CorrelationCase correlationCase = CentralRepository.getInstance().getCase(Case.getCurrentCaseThrows());
CorrelationAttributeInstance correlationAttributeInstance = new CorrelationAttributeInstance(
CentralRepository.getInstance().getCorrelationTypeById(CorrelationAttributeInstance.OSACCOUNT_TYPE_ID),
accountAddr.get(),
correlationCase,
CorrelationDataSource.fromTSKDataSource(correlationCase, dataSource),
dataSource.getName(),
"",
TskData.FileKnown.KNOWN,
account.getId());
correlationAttrs.add(correlationAttributeInstance);
} catch (CentralRepoException ex) {
logger.log(Level.SEVERE, String.format("Error querying central repository for OS account '%s'", accountAddr.get()), ex); //NON-NLS
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, String.format("Error getting current case for OS account '%s'", accountAddr.get()), ex); //NON-NLS
} catch (CorrelationAttributeNormalizationException ex) {
logger.log(Level.SEVERE, String.format("Error normalizing correlation attribute for OS account '%s'", accountAddr.get()), ex); //NON-NLS
}
}
}
return correlationAttrs;
}
/**
* Determines whether or not a given OS account address is a system account
* address.
*
* @param accountAddr The OS account address.
*
* @return True or false.
*/
private static boolean isSystemOsAccount(String accountAddr) {
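// S-1-5-18, S-1-5-19, and S-1-5-20 are the well-known Windows SIDs for the
// LocalSystem, LocalService, and NetworkService accounts; they are present on
// every Windows system, so they are not useful for correlation.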
return accountAddr.equals("S-1-5-18") || accountAddr.equals("S-1-5-19") || accountAddr.equals("S-1-5-20");
}
/**
@ -787,43 +832,11 @@ public class CorrelationAttributeUtil {
public static List<CorrelationAttributeInstance> makeCorrAttrsForSearch(OsAccountInstance osAccountInst) {
List<CorrelationAttributeInstance> correlationAttrs = new ArrayList<>();
if (CentralRepository.isEnabled()) {
OsAccount account = null;
DataSource dataSource = null;
if (osAccountInst != null) {
try {
account = osAccountInst.getOsAccount();
dataSource = osAccountInst.getDataSource();
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error getting information from OsAccountInstance.", ex);
}
}
if (account != null && dataSource != null) {
Optional<String> accountAddr = account.getAddr();
// If the address is null or one of the SIDs below, we want to ignore it, since those
// accounts will always be present on a Windows system and they are not unique
if (accountAddr.isPresent() && !accountAddr.get().equals("S-1-5-18") && !accountAddr.get().equals("S-1-5-19") && !accountAddr.get().equals("S-1-5-20")) {
try {
CorrelationCase correlationCase = CentralRepository.getInstance().getCase(Case.getCurrentCaseThrows());
CorrelationAttributeInstance correlationAttributeInstance = new CorrelationAttributeInstance(
CentralRepository.getInstance().getCorrelationTypeById(CorrelationAttributeInstance.OSACCOUNT_TYPE_ID),
accountAddr.get(),
correlationCase,
CorrelationDataSource.fromTSKDataSource(correlationCase, dataSource),
dataSource.getName(),
"",
TskData.FileKnown.KNOWN,
account.getId());
correlationAttrs.add(correlationAttributeInstance);
} catch (CentralRepoException ex) {
logger.log(Level.SEVERE, String.format("Cannot get central repository for OsAccount: %s.", accountAddr.get()), ex); //NON-NLS
} catch (NoCurrentCaseException ex) {
logger.log(Level.WARNING, "Exception while getting open case.", ex); //NON-NLS
} catch (CorrelationAttributeNormalizationException ex) {
logger.log(Level.SEVERE, "Exception with Correlation Attribute Normalization.", ex); //NON-NLS
}
}
if (CentralRepository.isEnabled() && osAccountInst != null) {
try {
correlationAttrs.addAll(makeCorrAttrsToSave(osAccountInst.getOsAccount(), osAccountInst.getDataSource()));
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Error getting OS account from OS account instance '%s'", osAccountInst), ex);
}
}
return correlationAttrs;

View File

@ -1,7 +1,7 @@
/*
* Central Repository
*
* Copyright 2015-2019 Basis Technology Corp.
* Copyright 2015-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -98,32 +98,33 @@ public class CorrelationDataSource implements Serializable {
}
/**
* Create a CorrelationDataSource object from a TSK Content object. This
* will add it to the central repository.
* Creates a central repository data source object from a case database data
* source. If the data source is not already present in the central
* repository, it is added.
*
* @param correlationCase the current CorrelationCase used for ensuring
* uniqueness of DataSource
* @param dataSource the sleuthkit datasource that is being added to
* the central repository
* @param correlationCase The central repository case associated with the
* data source.
* @param dataSource The case database data source.
*
* @return
* @return The central repository data source.
*
* @throws CentralRepoException
* @throws CentralRepoException This exception is thrown if there is an
* error creating the central repository data
* source.
*/
public static CorrelationDataSource fromTSKDataSource(CorrelationCase correlationCase, Content dataSource) throws CentralRepoException {
if (!CentralRepository.isEnabled()) {
throw new CentralRepoException(String.format("Central repository is not enabled, cannot create central repository data source for '%s'", dataSource));
}
Case curCase;
try {
curCase = Case.getCurrentCaseThrows();
} catch (NoCurrentCaseException ex) {
throw new CentralRepoException("Autopsy case is closed");
}
CorrelationDataSource correlationDataSource = null;
boolean useCR = CentralRepository.isEnabled();
if (useCR) {
correlationDataSource = CentralRepository.getInstance().getDataSource(correlationCase, dataSource.getId());
throw new CentralRepoException("Error getting current case", ex);
}
CorrelationDataSource correlationDataSource = CentralRepository.getInstance().getDataSource(correlationCase, dataSource.getId());
if (correlationDataSource == null) {
String deviceId;
String md5 = null;
@ -131,7 +132,7 @@ public class CorrelationDataSource implements Serializable {
String sha256 = null;
try {
deviceId = curCase.getSleuthkitCase().getDataSource(dataSource.getId()).getDeviceId();
if (dataSource instanceof Image) {
Image image = (Image) dataSource;
md5 = image.getMd5();
@ -139,15 +140,12 @@ public class CorrelationDataSource implements Serializable {
sha256 = image.getSha256();
}
} catch (TskDataException | TskCoreException ex) {
throw new CentralRepoException("Error getting data source info: " + ex.getMessage());
throw new CentralRepoException("Error getting data source info from case database", ex);
}
correlationDataSource = new CorrelationDataSource(correlationCase, deviceId, dataSource.getName(), dataSource.getId(), md5, sha1, sha256);
if (useCR) {
//add the correlation data source to the central repository and fill in the Central repository data source id in the object
correlationDataSource = CentralRepository.getInstance().newDataSource(correlationDataSource);
}
correlationDataSource = CentralRepository.getInstance().newDataSource(correlationDataSource);
}
return correlationDataSource;
}
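For reference, a minimal sketch of the typical call pattern (assumptions: an open case, an enabled central repository, and a Content object named dataSource; checked exceptions are left to the caller, as in the ingest listener elsewhere in this commit):
// Get or create the central repository case for the currently open Autopsy case.
CorrelationCase crCase = CentralRepository.getInstance().getCase(Case.getCurrentCaseThrows());
// Get or create the corresponding central repository data source record.
CorrelationDataSource crDataSource = CorrelationDataSource.fromTSKDataSource(crCase, dataSource);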
@ -205,66 +203,68 @@ public class CorrelationDataSource implements Serializable {
public String getName() {
return name;
}
/**
* @return the MD5 hash value
*/
public String getMd5() {
return (md5Hash == null ? "" : md5Hash);
}
/**
* Set the MD5 hash value and persist to the Central Repository if available.
*
* Set the MD5 hash value and persist to the Central Repository if
* available.
*
* @param md5Hash The MD5 hash value.
*
* @throws CentralRepoException If there's an issue updating the Central
Repository.
* Repository.
*/
public void setMd5(String md5Hash) throws CentralRepoException {
this.md5Hash = md5Hash;
if (dataSourceObjectID != -1) {
CentralRepository.getInstance().updateDataSourceMd5Hash(this);
}
}
/**
* @return the SHA-1 hash value
*/
public String getSha1() {
return (sha1Hash == null ? "" : sha1Hash);
}
/**
* Set the SHA-1 hash value and persist to the Central Repository if
* available.
*
*
* @param sha1Hash The SHA-1 hash value.
*/
public void setSha1(String sha1Hash) throws CentralRepoException {
this.sha1Hash = sha1Hash;
if (dataSourceObjectID != -1) {
CentralRepository.getInstance().updateDataSourceSha1Hash(this);
}
}
/**
* @return the SHA-256 hash value
*/
public String getSha256() {
return (sha256Hash == null ? "" : sha256Hash);
}
/**
* Set the SHA-256 hash value and persist to the Central Repository if
* available.
*
*
* @param sha256Hash The SHA-256 hash value.
*/
public void setSha256(String sha256Hash) throws CentralRepoException {
this.sha256Hash = sha256Hash;
if (dataSourceObjectID != -1) {
CentralRepository.getInstance().updateDataSourceSha256Hash(this);
}

View File

@ -1289,7 +1289,7 @@ abstract class RdbmsCentralRepo implements CentralRepository {
if (values == null || values.isEmpty()) {
throw new CorrelationAttributeNormalizationException("Cannot get artifact instances without specified values");
}
return getArtifactInstances(prepareGetInstancesSql(aType, values), aType);
return getCorrAttrInstances(prepareGetInstancesSql(aType, values), aType);
}
@Override
@ -1312,7 +1312,7 @@ abstract class RdbmsCentralRepo implements CentralRepository {
inValuesBuilder.append(sql);
inValuesBuilder.append(caseIds.stream().map(String::valueOf).collect(Collectors.joining("', '")));
inValuesBuilder.append("')");
return getArtifactInstances(inValuesBuilder.toString(), aType);
return getCorrAttrInstances(inValuesBuilder.toString(), aType);
}
/**
@ -1361,40 +1361,44 @@ abstract class RdbmsCentralRepo implements CentralRepository {
}
/**
* Retrieves eamArtifact instances from the database that are associated
* with the eamArtifactType and eamArtifactValues of the given eamArtifact.
* Retrieves correlation attribute instances from the central repository
* that match a given SQL query and correlation attribute type.
*
* @param aType The type of the artifact
* @param values The list of correlation values to get
* CorrelationAttributeInstances for
* @param sql The SQL query.
* @param attrType The correlation attribute type.
*
* @return List of artifact instances for a given type with the specified
* values
* @return The correlation attribute instances.
*
* @throws CorrelationAttributeNormalizationException
* @throws CentralRepoException
* @throws CorrelationAttributeNormalizationException The exception is thrown if the supplied correlation attribute value cannot be normalized.
* @throws CentralRepoException The exception is thrown if there is an error querying the central repository.
*/
private List<CorrelationAttributeInstance> getArtifactInstances(String sql, CorrelationAttributeInstance.Type aType) throws CorrelationAttributeNormalizationException, CentralRepoException {
private List<CorrelationAttributeInstance> getCorrAttrInstances(String sql, CorrelationAttributeInstance.Type attrType) throws CorrelationAttributeNormalizationException, CentralRepoException {
List<CorrelationAttributeInstance> corrAttrs = new ArrayList<>();
Connection conn = connect();
List<CorrelationAttributeInstance> artifactInstances = new ArrayList<>();
CorrelationAttributeInstance artifactInstance;
PreparedStatement preparedStatement = null;
ResultSet resultSet = null;
try {
preparedStatement = conn.prepareStatement(sql);
resultSet = preparedStatement.executeQuery();
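// Convert each row of the result set into a correlation attribute instance of the requested type.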
while (resultSet.next()) {
artifactInstance = getEamArtifactInstanceFromResultSet(resultSet, aType);
artifactInstances.add(artifactInstance);
CorrelationAttributeInstance corrAttr = getCorrAttrFromResultSet(resultSet, attrType);
corrAttrs.add(corrAttr);
}
} catch (SQLException ex) {
throw new CentralRepoException("Error getting artifact instances by artifactType and artifactValue.", ex); // NON-NLS
throw new CentralRepoException(String.format("Error getting correlation attributes using query %s", sql), ex); // NON-NLS
} finally {
CentralRepoDbUtil.closeResultSet(resultSet);
CentralRepoDbUtil.closeStatement(preparedStatement);
CentralRepoDbUtil.closeConnection(conn);
}
return artifactInstances;
return corrAttrs;
}
/**
@ -1509,7 +1513,7 @@ abstract class RdbmsCentralRepo implements CentralRepository {
PreparedStatement preparedStatement = null;
String tableName = CentralRepoDbUtil.correlationTypeToInstanceTableName(instance.getCorrelationType());
ResultSet resultSet = null;
try {
if (correlationCaseId > 0 && sourceObjID != null && correlationDataSourceId > 0) {
//The CorrelationCase is in the Central repository.
@ -3643,7 +3647,7 @@ abstract class RdbmsCentralRepo implements CentralRepository {
*
* @throws SQLException when an expected column name is not in the resultSet
*/
private CorrelationAttributeInstance getEamArtifactInstanceFromResultSet(ResultSet resultSet, CorrelationAttributeInstance.Type aType) throws SQLException, CentralRepoException, CorrelationAttributeNormalizationException {
private CorrelationAttributeInstance getCorrAttrFromResultSet(ResultSet resultSet, CorrelationAttributeInstance.Type aType) throws SQLException, CentralRepoException, CorrelationAttributeNormalizationException {
if (null == resultSet) {
return null;
}

View File

@ -1,18 +1,8 @@
caseeventlistener.evidencetag=Evidence
CaseEventsListener.module.name=Central Repository
CaseEventsListener.prevCaseComment.text=Users seen in previous cases
CaseEventsListener.prevExists.text=Previously Seen Users (Central Repository)
CentralRepositoryNotificationDialog.bulletHeader=This data is used to:
CentralRepositoryNotificationDialog.bulletOne=Ignore common items (files, domains, and accounts)
CentralRepositoryNotificationDialog.bulletThree=Create personas that group accounts
CentralRepositoryNotificationDialog.bulletTwo=Identify where an item was previously seen
CentralRepositoryNotificationDialog.finalRemarks=To limit what is stored, use the Central Repository options panel.
CentralRepositoryNotificationDialog.header=Autopsy stores data about each case in its Central Repository.
IngestEventsListener.ingestmodule.name=Central Repository
IngestEventsListener.prevCaseComment.text=Previous Case:
# {0} - typeName
# {1} - count
IngestEventsListener.prevCount.text=Number of previous {0}: {1}
IngestEventsListener.prevExists.text=Previously Seen Devices (Central Repository)
IngestEventsListener.prevTaggedSet.text=Previously Tagged As Notable (Central Repository)
Installer.centralRepoUpgradeFailed.title=Central repository disabled

View File

@ -22,18 +22,13 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.EnumSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.logging.Level;
import java.util.stream.Collectors;
import org.apache.commons.lang.StringUtils;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
@ -43,7 +38,6 @@ import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent;
import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent;
import org.sleuthkit.autopsy.casemodule.events.DataSourceAddedEvent;
import org.sleuthkit.autopsy.casemodule.events.DataSourceNameChangedEvent;
import org.sleuthkit.autopsy.casemodule.events.OsAcctInstancesAddedEvent;
import org.sleuthkit.autopsy.casemodule.services.TagsManager;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
@ -61,34 +55,20 @@ import org.sleuthkit.datamodel.TagName;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException;
import org.sleuthkit.datamodel.Tag;
import org.sleuthkit.autopsy.events.AutopsyEvent;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.datamodel.AnalysisResult;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_OTHER_CASES;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_TYPE;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_VALUE;
import org.sleuthkit.datamodel.DataArtifact;
import org.sleuthkit.datamodel.OsAccount;
import org.sleuthkit.datamodel.OsAccountInstance;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.SleuthkitCase;
/**
* Listen for case events and update entries in the Central Repository database
* accordingly
* An Autopsy events listener for case events relevant to the central
* repository.
*/
@Messages({"caseeventlistener.evidencetag=Evidence"})
public final class CaseEventListener implements PropertyChangeListener {
private static final Logger LOGGER = Logger.getLogger(CaseEventListener.class.getName());
private final ExecutorService jobProcessingExecutor;
private static final String CASE_EVENT_THREAD_NAME = "Case-Event-Listener-%d";
private static final String CASE_EVENT_THREAD_NAME = "CR-Case-Event-Listener-%d";
private static final Set<Case.Events> CASE_EVENTS_OF_INTEREST = EnumSet.of(
Case.Events.CONTENT_TAG_ADDED, Case.Events.CONTENT_TAG_DELETED,
Case.Events.BLACKBOARD_ARTIFACT_TAG_DELETED, Case.Events.BLACKBOARD_ARTIFACT_TAG_ADDED,
@ -96,14 +76,29 @@ public final class CaseEventListener implements PropertyChangeListener {
Case.Events.DATA_SOURCE_ADDED,
Case.Events.TAG_DEFINITION_CHANGED,
Case.Events.CURRENT_CASE,
Case.Events.DATA_SOURCE_NAME_CHANGED,
Case.Events.OS_ACCT_INSTANCES_ADDED);
Case.Events.DATA_SOURCE_NAME_CHANGED);
private final ExecutorService jobProcessingExecutor;
/**
* Constructs an Autopsy events listener for case events relevant to the
* central repository.
*/
public CaseEventListener() {
jobProcessingExecutor = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setNameFormat(CASE_EVENT_THREAD_NAME).build());
}
/**
* Starts up the listener.
*/
public void startUp() {
Case.addEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, this);
}
/**
* Shuts down the listener.
*/
public void shutdown() {
Case.removeEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, this);
ThreadUtils.shutDownTaskExecutor(jobProcessingExecutor);
}
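For context, CentralRepositoryService (earlier in this commit) drives this lifecycle roughly as follows (condensed sketch; progress reporting and cancellation checks omitted):
CaseEventListener listener = new CaseEventListener();
listener.startUp();   // openCaseResources(): subscribe to CASE_EVENTS_OF_INTEREST
listener.shutdown();  // closeCaseResources(): unsubscribe and stop the task executor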
@ -113,92 +108,73 @@ public final class CaseEventListener implements PropertyChangeListener {
return;
}
CentralRepository dbManager;
try {
dbManager = CentralRepository.getInstance();
} catch (CentralRepoException ex) {
LOGGER.log(Level.SEVERE, "Failed to get instance of db manager.", ex);
if (!CentralRepository.isEnabled()) {
return;
}
// If any changes are made to which event types are handled the change
// must also be made to CASE_EVENTS_OF_INTEREST.
CentralRepository centralRepo;
try {
centralRepo = CentralRepository.getInstance();
} catch (CentralRepoException ex) {
LOGGER.log(Level.SEVERE, "Failed to access central repository", ex);
return;
}
/*
* IMPORTANT: If any changes are made to which event types are handled,
* the change must also be made to the contents of the
* CASE_EVENTS_OF_INTEREST set.
*/
switch (Case.Events.valueOf(evt.getPropertyName())) {
case CONTENT_TAG_ADDED:
case CONTENT_TAG_DELETED: {
jobProcessingExecutor.submit(new ContentTagTask(dbManager, evt));
}
break;
case CONTENT_TAG_DELETED:
jobProcessingExecutor.submit(new ContentTagTask(centralRepo, evt));
break;
case BLACKBOARD_ARTIFACT_TAG_DELETED:
case BLACKBOARD_ARTIFACT_TAG_ADDED: {
jobProcessingExecutor.submit(new BlackboardTagTask(dbManager, evt));
}
break;
case DATA_SOURCE_ADDED: {
jobProcessingExecutor.submit(new DataSourceAddedTask(dbManager, evt));
}
break;
case TAG_DEFINITION_CHANGED: {
case BLACKBOARD_ARTIFACT_TAG_ADDED:
jobProcessingExecutor.submit(new ArtifactTagTask(centralRepo, evt));
break;
case DATA_SOURCE_ADDED:
jobProcessingExecutor.submit(new DataSourceAddedTask(centralRepo, evt));
break;
case TAG_DEFINITION_CHANGED:
jobProcessingExecutor.submit(new TagDefinitionChangeTask(evt));
}
break;
case CURRENT_CASE: {
jobProcessingExecutor.submit(new CurrentCaseTask(dbManager, evt));
}
break;
case DATA_SOURCE_NAME_CHANGED: {
jobProcessingExecutor.submit(new DataSourceNameChangedTask(dbManager, evt));
}
break;
case OS_ACCT_INSTANCES_ADDED: {
if (((AutopsyEvent) evt).getSourceType() == AutopsyEvent.SourceType.LOCAL) {
jobProcessingExecutor.submit(new OsAccountInstancesAddedTask(dbManager, evt));
}
}
break;
break;
case CURRENT_CASE:
jobProcessingExecutor.submit(new CurrentCaseTask(centralRepo, evt));
break;
case DATA_SOURCE_NAME_CHANGED:
jobProcessingExecutor.submit(new DataSourceNameChangedTask(centralRepo, evt));
break;
default:
break;
}
}
/*
* Add all of our Case Event Listeners to the case.
/**
* Determines whether or not a tag has notable status.
*
* @param tag The tag.
*
* @return True or false.
*/
public void installListeners() {
Case.addEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, this);
}
/*
* Remove all of our Case Event Listeners from the case.
*/
public void uninstallListeners() {
Case.removeEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, this);
private static boolean isNotableTag(Tag tag) {
return (tag != null && isNotableTagDefinition(tag.getName()));
}
/**
* Returns true if the tag has a notable status.
* Determines whether or not a tag definition calls for notable status.
*
* @param t The tag to use in determination.
* @param tagDef The tag definition.
*
* @return Whether or not it is a notable tag.
* @return True or false.
*/
private static boolean isNotableTag(Tag t) {
return (t != null && isNotableTagName(t.getName()));
private static boolean isNotableTagDefinition(TagName tagDef) {
return (tagDef != null && TagsManager.getNotableTagDisplayNames().contains(tagDef.getDisplayName()));
}
/**
* Returns true if the tag name has a notable status.
*
* @param t The tag name to use in determination.
*
* @return Whether or not it is a notable tag name.
*/
private static boolean isNotableTagName(TagName t) {
return (t != null && TagsManager.getNotableTagDisplayNames().contains(t.getDisplayName()));
}
/**
* Searches a list of tags for a tag with a notable status.
* Searches a list of tags for a tag with notable status.
*
* @param tags The tags to search.
*
@ -208,7 +184,6 @@ public final class CaseEventListener implements PropertyChangeListener {
if (tags == null) {
return false;
}
return tags.stream()
.filter(CaseEventListener::isNotableTag)
.findFirst()
@ -216,24 +191,25 @@ public final class CaseEventListener implements PropertyChangeListener {
}
/**
* Sets the known status of a blackboard artifact in the central repository.
* Sets the notable (known) status of a central repository correlation
* attribute corresponding to an artifact.
*
* @param dbManager The central repo database.
* @param bbArtifact The blackboard artifact to set known status.
* @param knownStatus The new known status.
* @param centralRepo The central repository.
* @param artifact The artifact.
* @param notableStatus The new notable status.
*/
private static void setArtifactKnownStatus(CentralRepository dbManager, BlackboardArtifact bbArtifact, TskData.FileKnown knownStatus) {
List<CorrelationAttributeInstance> convertedArtifacts = new ArrayList<>();
if (bbArtifact instanceof DataArtifact) {
convertedArtifacts.addAll(CorrelationAttributeUtil.makeCorrAttrsForSearch((DataArtifact) bbArtifact));
} else if (bbArtifact instanceof AnalysisResult) {
convertedArtifacts.addAll(CorrelationAttributeUtil.makeCorrAttrsForSearch((AnalysisResult) bbArtifact));
private static void setArtifactKnownStatus(CentralRepository centralRepo, BlackboardArtifact artifact, TskData.FileKnown notableStatus) {
List<CorrelationAttributeInstance> corrAttrInstances = new ArrayList<>();
if (artifact instanceof DataArtifact) {
corrAttrInstances.addAll(CorrelationAttributeUtil.makeCorrAttrsForSearch((DataArtifact) artifact));
} else if (artifact instanceof AnalysisResult) {
corrAttrInstances.addAll(CorrelationAttributeUtil.makeCorrAttrsForSearch((AnalysisResult) artifact));
}
for (CorrelationAttributeInstance eamArtifact : convertedArtifacts) {
for (CorrelationAttributeInstance corrAttrInstance : corrAttrInstances) {
try {
dbManager.setAttributeInstanceKnownStatus(eamArtifact, knownStatus);
centralRepo.setAttributeInstanceKnownStatus(corrAttrInstance, notableStatus);
} catch (CentralRepoException ex) {
LOGGER.log(Level.SEVERE, "Error connecting to Central Repository database while setting artifact known status.", ex); //NON-NLS
LOGGER.log(Level.SEVERE, String.format("Error setting correlation attribute instance known status", corrAttrInstance), ex); //NON-NLS
}
}
}
@ -359,12 +335,12 @@ public final class CaseEventListener implements PropertyChangeListener {
}
}
private final class BlackboardTagTask implements Runnable {
private final class ArtifactTagTask implements Runnable {
private final CentralRepository dbManager;
private final PropertyChangeEvent event;
private BlackboardTagTask(CentralRepository db, PropertyChangeEvent evt) {
private ArtifactTagTask(CentralRepository db, PropertyChangeEvent evt) {
dbManager = db;
event = evt;
}
@ -644,7 +620,6 @@ public final class CaseEventListener implements PropertyChangeListener {
*/
if ((null == event.getOldValue()) && (event.getNewValue() instanceof Case)) {
Case curCase = (Case) event.getNewValue();
IngestEventsListener.resetCeModuleInstanceCount();
if (!CentralRepository.isEnabled()) {
return;
@ -663,126 +638,6 @@ public final class CaseEventListener implements PropertyChangeListener {
} // CURRENT_CASE
}
@NbBundle.Messages({"CaseEventsListener.module.name=Central Repository",
"CaseEventsListener.prevCaseComment.text=Users seen in previous cases",
"CaseEventsListener.prevExists.text=Previously Seen Users (Central Repository)"})
/**
* Add OsAccount Instance to CR and find interesting items based on the
* OsAccount
*/
private final class OsAccountInstancesAddedTask implements Runnable {
private final CentralRepository dbManager;
private final PropertyChangeEvent event;
private final String MODULE_NAME = Bundle.CaseEventsListener_module_name();
private OsAccountInstancesAddedTask(CentralRepository db, PropertyChangeEvent evt) {
dbManager = db;
event = evt;
}
@Override
public void run() {
//Nothing to do here if the central repo is not enabled or if ingest is running but is set to not save data/make artifacts
if (!CentralRepository.isEnabled()
|| (IngestManager.getInstance().isIngestRunning() && !(IngestEventsListener.isFlagSeenDevices() || IngestEventsListener.shouldCreateCrProperties()))) {
return;
}
final OsAcctInstancesAddedEvent osAcctInstancesAddedEvent = (OsAcctInstancesAddedEvent) event;
List<OsAccountInstance> addedOsAccountNew = osAcctInstancesAddedEvent.getOsAccountInstances();
for (OsAccountInstance osAccountInstance : addedOsAccountNew) {
try {
OsAccount osAccount = osAccountInstance.getOsAccount();
List<CorrelationAttributeInstance> correlationAttributeInstances = CorrelationAttributeUtil.makeCorrAttrsToSave(osAccountInstance);
if (correlationAttributeInstances.isEmpty()) {
return;
}
Optional<String> accountAddr = osAccount.getAddr();
try {
// Save to the database if requested
if (IngestEventsListener.shouldCreateCrProperties()) {
for (CorrelationAttributeInstance correlationAttributeInstance : correlationAttributeInstances) {
dbManager.addArtifactInstance(correlationAttributeInstance);
}
}
// Look up and create artifacts for previously seen accounts if requested
if (IngestEventsListener.isFlagSeenDevices()) {
CorrelationAttributeInstance instanceWithTypeValue = null;
for (CorrelationAttributeInstance instance : correlationAttributeInstances) {
if (instance.getCorrelationType().getId() == CorrelationAttributeInstance.OSACCOUNT_TYPE_ID) {
instanceWithTypeValue = instance;
break;
}
}
if (instanceWithTypeValue != null) {
List<CorrelationAttributeInstance> previousOccurences = dbManager.getArtifactInstancesByTypeValue(instanceWithTypeValue.getCorrelationType(), instanceWithTypeValue.getCorrelationValue());
for (CorrelationAttributeInstance instance : previousOccurences) {
//we can get the first instance here since the case for all attributes will be the same
if (!instance.getCorrelationCase().getCaseUUID().equals(instanceWithTypeValue.getCorrelationCase().getCaseUUID())) {
SleuthkitCase tskCase = osAccount.getSleuthkitCase();
Blackboard blackboard = tskCase.getBlackboard();
List<String> caseDisplayNames = dbManager.getListCasesHavingArtifactInstances(instanceWithTypeValue.getCorrelationType(), instanceWithTypeValue.getCorrelationValue());
// calculate score
Score score;
int numCases = caseDisplayNames.size();
if (numCases <= IngestEventsListener.MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE) {
score = Score.SCORE_LIKELY_NOTABLE;
} else if (numCases > IngestEventsListener.MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE && numCases <= IngestEventsListener.MAX_NUM_PREVIOUS_CASES_FOR_PREV_SEEN_ARTIFACT_CREATION) {
score = Score.SCORE_NONE;
} else {
// don't make an Analysis Result, the artifact is too common.
continue;
}
String prevCases = caseDisplayNames.stream().distinct().collect(Collectors.joining(","));
String justification = "Previously seen in cases " + prevCases;
Collection<BlackboardAttribute> attributesForNewArtifact = Arrays.asList(
new BlackboardAttribute(
TSK_SET_NAME, MODULE_NAME,
Bundle.CaseEventsListener_prevExists_text()),
new BlackboardAttribute(
TSK_CORRELATION_TYPE, MODULE_NAME,
instance.getCorrelationType().getDisplayName()),
new BlackboardAttribute(
TSK_CORRELATION_VALUE, MODULE_NAME,
instanceWithTypeValue.getCorrelationValue()),
new BlackboardAttribute(
TSK_OTHER_CASES, MODULE_NAME,
prevCases));
BlackboardArtifact newAnalysisResult = osAccount.newAnalysisResult(
BlackboardArtifact.Type.TSK_PREVIOUSLY_SEEN, score,
null, Bundle.CaseEventsListener_prevExists_text(), justification, attributesForNewArtifact, osAccountInstance.getDataSource().getId()).getAnalysisResult();
try {
blackboard.postArtifact(newAnalysisResult, MODULE_NAME, null);
break;
} catch (Blackboard.BlackboardException ex) {
LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + newAnalysisResult.getArtifactID(), ex); //NON-NLS
}
}
}
}
}
} catch (CorrelationAttributeNormalizationException ex) {
LOGGER.log(Level.SEVERE, "Exception with Correlation Attribute Normalization.", ex); //NON-NLS
} catch (CentralRepoException ex) {
LOGGER.log(Level.SEVERE, String.format("Cannot get central repository for OsAccount: %s.", accountAddr.get()), ex); //NON-NLS
}
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "Cannot get central repository for OsAccount: " + "OsAccount", ex);
}
}
}
}
private final class DataSourceNameChangedTask implements Runnable {
private final CentralRepository dbManager;
@ -816,6 +671,7 @@ public final class CaseEventListener implements PropertyChangeListener {
LOGGER.log(Level.SEVERE, "No open case", ex);
}
}
} // DATA_SOURCE_NAME_CHANGED
}
}
}

View File

@ -1,673 +0,0 @@
/*
* Central Repository
*
* Copyright 2017-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.centralrepository.eventlisteners;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import static java.lang.Boolean.FALSE;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.EnumSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.logging.Level;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeUtil;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.autopsy.coreutils.ThreadUtils;
import static org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent.DATA_ADDED;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_TYPE;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_VALUE;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_OTHER_CASES;
import org.sleuthkit.autopsy.ingest.events.DataSourceAnalysisEvent;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository;
import org.sleuthkit.datamodel.DataArtifact;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.TskData;
/**
* Listen for ingest events and update entries in the Central Repository
* database accordingly
*/
@NbBundle.Messages({"IngestEventsListener.ingestmodule.name=Central Repository"})
public class IngestEventsListener {
private static final Logger LOGGER = Logger.getLogger(IngestEventsListener.class.getName());
private static final Set<IngestManager.IngestJobEvent> INGEST_JOB_EVENTS_OF_INTEREST = EnumSet.of(IngestManager.IngestJobEvent.DATA_SOURCE_ANALYSIS_COMPLETED);
private static final Set<IngestManager.IngestModuleEvent> INGEST_MODULE_EVENTS_OF_INTEREST = EnumSet.of(DATA_ADDED);
private static final String MODULE_NAME = Bundle.IngestEventsListener_ingestmodule_name();
private static int correlationModuleInstanceCount;
private static boolean flagNotableItems;
private static boolean flagSeenDevices;
private static boolean createCrProperties;
private static boolean flagUniqueArtifacts;
private static final String INGEST_EVENT_THREAD_NAME = "Ingest-Event-Listener-%d";
private final ExecutorService jobProcessingExecutor;
private final PropertyChangeListener pcl1 = new IngestModuleEventListener();
private final PropertyChangeListener pcl2 = new IngestJobEventListener();
final Collection<String> recentlyAddedCeArtifacts = new LinkedHashSet<>();
static final int MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE = 10;
static final int MAX_NUM_PREVIOUS_CASES_FOR_PREV_SEEN_ARTIFACT_CREATION = 20;
public IngestEventsListener() {
jobProcessingExecutor = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setNameFormat(INGEST_EVENT_THREAD_NAME).build());
}
public void shutdown() {
ThreadUtils.shutDownTaskExecutor(jobProcessingExecutor);
}
/*
* Add all of our Ingest Event Listeners to the IngestManager Instance.
*/
public void installListeners() {
IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS_OF_INTEREST, pcl1);
IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, pcl2);
}
/*
* Remove all of our Ingest Event Listeners from the IngestManager Instance.
*/
public void uninstallListeners() {
IngestManager.getInstance().removeIngestModuleEventListener(pcl1);
IngestManager.getInstance().removeIngestJobEventListener(pcl2);
}
/**
* Increase the number of IngestEventsListeners adding contents to the
* Central Repository.
*/
public synchronized static void incrementCorrelationEngineModuleCount() {
correlationModuleInstanceCount++; //Should be called once in the Central Repository module's startup method.
}
/**
* Decrease the number of IngestEventsListeners adding contents to the
* Central Repository.
*/
public synchronized static void decrementCorrelationEngineModuleCount() {
if (getCeModuleInstanceCount() > 0) { //prevent the counter from going negative
correlationModuleInstanceCount--; //Should be called once in the Central Repository module's shutdown method.
}
}
/**
* Reset to 0 the counter that keeps track of whether the Central
* Repository Module is being run during ingest.
*/
synchronized static void resetCeModuleInstanceCount() {
correlationModuleInstanceCount = 0; //called when a case is opened in case for some reason counter was not reset
}
/**
* Gets the number of ingest jobs for which the Central Repository Module
* is currently enabled.
*
* @return The module instance count.
*/
public synchronized static int getCeModuleInstanceCount() {
return correlationModuleInstanceCount;
}
/**
* Are notable items being flagged?
*
* @return True if flagging notable items; otherwise false.
*/
public synchronized static boolean isFlagNotableItems() {
return flagNotableItems;
}
/**
* Are previously seen devices being flagged?
*
* @return True if flagging seen devices; otherwise false.
*/
public synchronized static boolean isFlagSeenDevices() {
return flagSeenDevices;
}
/**
* Are correlation properties being created?
*
* @return True if creating correlation properties; otherwise false.
*/
public synchronized static boolean shouldCreateCrProperties() {
return createCrProperties;
}
/**
* Configure the listener to flag notable items or not.
*
* @param value True to flag notable items; otherwise false.
*/
public synchronized static void setFlagNotableItems(boolean value) {
flagNotableItems = value;
}
/**
* Configure the listener to flag previously seen devices or not.
*
* @param value True to flag seen devices; otherwise false.
*/
public synchronized static void setFlagSeenDevices(boolean value) {
flagSeenDevices = value;
}
/**
* Configure the listener to flag unique apps or not.
*
* @param value True to flag unique apps; otherwise false.
*/
public synchronized static void setFlagUniqueArtifacts(boolean value) {
flagUniqueArtifacts = value;
}
/**
* Are unique apps being flagged?
*
* @return True if flagging unique apps; otherwise false.
*/
public synchronized static boolean isFlagUniqueArtifacts() {
return flagUniqueArtifacts;
}
/**
* Configure the listener to create correlation properties
*
* @param value True to create properties; otherwise false.
*/
public synchronized static void setCreateCrProperties(boolean value) {
createCrProperties = value;
}
/**
* Make a "previously seen" artifact based on a new artifact being
* previously seen.
*
* @param originalArtifact Original artifact that we want to flag
* @param caseDisplayNames List of case names artifact was previously seen
* in
* @param aType The correlation type.
* @param value The correlation value.
*/
@NbBundle.Messages({"IngestEventsListener.prevTaggedSet.text=Previously Tagged As Notable (Central Repository)",
"IngestEventsListener.prevCaseComment.text=Previous Case: "})
static private void makeAndPostPreviousNotableArtifact(BlackboardArtifact originalArtifact, List<String> caseDisplayNames,
CorrelationAttributeInstance.Type aType, String value) {
String prevCases = caseDisplayNames.stream().distinct().collect(Collectors.joining(","));
String justification = "Previously marked as notable in cases " + prevCases;
Collection<BlackboardAttribute> attributesForNewArtifact = Arrays.asList(new BlackboardAttribute(
TSK_SET_NAME, MODULE_NAME,
Bundle.IngestEventsListener_prevTaggedSet_text()),
new BlackboardAttribute(
TSK_CORRELATION_TYPE, MODULE_NAME,
aType.getDisplayName()),
new BlackboardAttribute(
TSK_CORRELATION_VALUE, MODULE_NAME,
value),
new BlackboardAttribute(
TSK_OTHER_CASES, MODULE_NAME,
prevCases));
makeAndPostArtifact(BlackboardArtifact.Type.TSK_PREVIOUSLY_NOTABLE, originalArtifact, attributesForNewArtifact, Bundle.IngestEventsListener_prevTaggedSet_text(),
Score.SCORE_NOTABLE, justification);
}
/**
* Create a "previously seen" hit for a device which was previously seen in
* the central repository. NOTE: Artifacts that are too common will be
* skipped.
*
* @param originalArtifact the artifact to create the "previously seen" item
* for
* @param caseDisplayNames the case names the artifact was previously seen
* in
* @param aType The correlation type.
* @param value The correlation value.
*/
@NbBundle.Messages({"IngestEventsListener.prevExists.text=Previously Seen Devices (Central Repository)",
"# {0} - typeName",
"# {1} - count",
"IngestEventsListener.prevCount.text=Number of previous {0}: {1}"})
static private void makeAndPostPreviousSeenArtifact(BlackboardArtifact originalArtifact, List<String> caseDisplayNames,
CorrelationAttributeInstance.Type aType, String value) {
// calculate score
Score score;
int numCases = caseDisplayNames.size();
if (numCases <= MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE) {
score = Score.SCORE_LIKELY_NOTABLE;
} else if (numCases > MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE && numCases <= MAX_NUM_PREVIOUS_CASES_FOR_PREV_SEEN_ARTIFACT_CREATION) {
score = Score.SCORE_NONE;
} else {
// don't make an Analysis Result, the artifact is too common.
return;
}
String prevCases = caseDisplayNames.stream().distinct().collect(Collectors.joining(","));
String justification = "Previously seen in cases " + prevCases;
Collection<BlackboardAttribute> attributesForNewArtifact = Arrays.asList(new BlackboardAttribute(
TSK_SET_NAME, MODULE_NAME,
Bundle.IngestEventsListener_prevExists_text()),
new BlackboardAttribute(
TSK_CORRELATION_TYPE, MODULE_NAME,
aType.getDisplayName()),
new BlackboardAttribute(
TSK_CORRELATION_VALUE, MODULE_NAME,
value),
new BlackboardAttribute(
TSK_OTHER_CASES, MODULE_NAME,
prevCases));
makeAndPostArtifact(BlackboardArtifact.Type.TSK_PREVIOUSLY_SEEN, originalArtifact, attributesForNewArtifact, Bundle.IngestEventsListener_prevExists_text(),
score, justification);
}
/**
* Create a "previously unseen" hit for an application which was never seen
* in the central repository.
*
* @param originalArtifact the artifact to create the "previously unseen"
* item for
* @param aType The correlation type.
* @param value The correlation value.
*/
static private void makeAndPostPreviouslyUnseenArtifact(BlackboardArtifact originalArtifact, CorrelationAttributeInstance.Type aType, String value) {
Collection<BlackboardAttribute> attributesForNewArtifact = Arrays.asList(
new BlackboardAttribute(
TSK_CORRELATION_TYPE, MODULE_NAME,
aType.getDisplayName()),
new BlackboardAttribute(
TSK_CORRELATION_VALUE, MODULE_NAME,
value));
makeAndPostArtifact(BlackboardArtifact.Type.TSK_PREVIOUSLY_UNSEEN, originalArtifact, attributesForNewArtifact, "",
Score.SCORE_LIKELY_NOTABLE, "This application has not been previously seen before");
}
/**
* Make an artifact to flag the passed in artifact.
*
* @param newArtifactType Type of artifact to create.
* @param originalArtifact Artifact in current case we want to flag
* @param attributesForNewArtifact Attributes to assign to the new artifact
* @param configuration The configuration to be specified for the
* new artifact hit
* @param score sleuthkit.datamodel.Score to be assigned
* to this artifact
* @param justification Justification string
*/
private static void makeAndPostArtifact(BlackboardArtifact.Type newArtifactType, BlackboardArtifact originalArtifact, Collection<BlackboardAttribute> attributesForNewArtifact, String configuration,
Score score, String justification) {
try {
SleuthkitCase tskCase = originalArtifact.getSleuthkitCase();
Blackboard blackboard = tskCase.getBlackboard();
// Create artifact if it doesn't already exist.
BlackboardArtifact.ARTIFACT_TYPE type = BlackboardArtifact.ARTIFACT_TYPE.fromID(newArtifactType.getTypeID());
if (!blackboard.artifactExists(originalArtifact, type, attributesForNewArtifact)) {
BlackboardArtifact newArtifact = originalArtifact.newAnalysisResult(
newArtifactType, score,
null, configuration, justification, attributesForNewArtifact)
.getAnalysisResult();
try {
blackboard.postArtifact(newArtifact, MODULE_NAME, null);
} catch (Blackboard.BlackboardException ex) {
LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + newArtifact.getArtifactID(), ex); //NON-NLS
}
}
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "Failed to create BlackboardArtifact.", ex); // NON-NLS
} catch (IllegalStateException ex) {
LOGGER.log(Level.SEVERE, "Failed to create BlackboardAttribute.", ex); // NON-NLS
}
}
private class IngestModuleEventListener implements PropertyChangeListener {
@Override
public void propertyChange(PropertyChangeEvent evt) {
//if ingest is running we want to check whether a Central Repository module is running
//sometimes artifacts are generated by DSPs or other sources while ingest is not running
//in these cases we still want to create correlation attributesForNewArtifact for those artifacts when appropriate
if (!IngestManager.getInstance().isIngestRunning() || getCeModuleInstanceCount() > 0) {
CentralRepository dbManager;
try {
dbManager = CentralRepository.getInstance();
} catch (CentralRepoException ex) {
LOGGER.log(Level.SEVERE, "Failed to connect to Central Repository database.", ex);
return;
}
switch (IngestManager.IngestModuleEvent.valueOf(evt.getPropertyName())) {
case DATA_ADDED: {
//if ingest isn't running create the "previously seen" items,
// otherwise use the ingest module setting to determine if we create "previously seen" items
boolean flagNotable = !IngestManager.getInstance().isIngestRunning() || isFlagNotableItems();
boolean flagPrevious = !IngestManager.getInstance().isIngestRunning() || isFlagSeenDevices();
boolean createAttributes = !IngestManager.getInstance().isIngestRunning() || shouldCreateCrProperties();
boolean flagUnique = !IngestManager.getInstance().isIngestRunning() || isFlagUniqueArtifacts();
jobProcessingExecutor.submit(new DataAddedTask(dbManager, evt, flagNotable, flagPrevious, createAttributes, flagUnique));
break;
}
default:
break;
}
}
}
}
private class IngestJobEventListener implements PropertyChangeListener {
@Override
public void propertyChange(PropertyChangeEvent evt) {
CentralRepository dbManager;
try {
dbManager = CentralRepository.getInstance();
} catch (CentralRepoException ex) {
LOGGER.log(Level.SEVERE, "Failed to connect to Central Repository database.", ex);
return;
}
switch (IngestManager.IngestJobEvent.valueOf(evt.getPropertyName())) {
case DATA_SOURCE_ANALYSIS_COMPLETED: {
jobProcessingExecutor.submit(new AnalysisCompleteTask(dbManager, evt));
break;
}
default:
break;
}
}
}
private final class AnalysisCompleteTask implements Runnable {
private final CentralRepository dbManager;
private final PropertyChangeEvent event;
private AnalysisCompleteTask(CentralRepository db, PropertyChangeEvent evt) {
dbManager = db;
event = evt;
}
@Override
public void run() {
// clear the tracker to reduce memory usage
if (getCeModuleInstanceCount() == 0) {
recentlyAddedCeArtifacts.clear();
}
//else another instance of the Central Repository Module is still being run.
/*
* Ensure the data source in the Central Repository has hash values
* that match those in the case database.
*/
if (!CentralRepository.isEnabled()) {
return;
}
Content dataSource;
String dataSourceName = "";
long dataSourceObjectId = -1;
try {
dataSource = ((DataSourceAnalysisEvent) event).getDataSource();
/*
* We only care about Images for the purpose of updating hash
* values.
*/
if (!(dataSource instanceof Image)) {
return;
}
dataSourceName = dataSource.getName();
dataSourceObjectId = dataSource.getId();
Case openCase = Case.getCurrentCaseThrows();
CorrelationCase correlationCase = dbManager.getCase(openCase);
if (null == correlationCase) {
correlationCase = dbManager.newCase(openCase);
}
CorrelationDataSource correlationDataSource = dbManager.getDataSource(correlationCase, dataSource.getId());
if (correlationDataSource == null) {
// Add the data source.
CorrelationDataSource.fromTSKDataSource(correlationCase, dataSource);
} else {
// Sync the data source hash values if necessary.
if (dataSource instanceof Image) {
Image image = (Image) dataSource;
String imageMd5Hash = image.getMd5();
if (imageMd5Hash == null) {
imageMd5Hash = "";
}
String crMd5Hash = correlationDataSource.getMd5();
if (StringUtils.equals(imageMd5Hash, crMd5Hash) == false) {
correlationDataSource.setMd5(imageMd5Hash);
}
String imageSha1Hash = image.getSha1();
if (imageSha1Hash == null) {
imageSha1Hash = "";
}
String crSha1Hash = correlationDataSource.getSha1();
if (StringUtils.equals(imageSha1Hash, crSha1Hash) == false) {
correlationDataSource.setSha1(imageSha1Hash);
}
String imageSha256Hash = image.getSha256();
if (imageSha256Hash == null) {
imageSha256Hash = "";
}
String crSha256Hash = correlationDataSource.getSha256();
if (StringUtils.equals(imageSha256Hash, crSha256Hash) == false) {
correlationDataSource.setSha256(imageSha256Hash);
}
}
}
} catch (CentralRepoException ex) {
LOGGER.log(Level.SEVERE, String.format(
"Unable to fetch data from the Central Repository for data source '%s' (obj_id=%d)",
dataSourceName, dataSourceObjectId), ex);
} catch (NoCurrentCaseException ex) {
LOGGER.log(Level.SEVERE, "No current case opened.", ex);
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, String.format(
"Unable to fetch data from the case database for data source '%s' (obj_id=%d)",
dataSourceName, dataSourceObjectId), ex);
}
} // DATA_SOURCE_ANALYSIS_COMPLETED
}
private final class DataAddedTask implements Runnable {
private final CentralRepository dbManager;
private final PropertyChangeEvent event;
private final boolean flagNotableItemsEnabled;
private final boolean flagPreviousItemsEnabled;
private final boolean createCorrelationAttributes;
private final boolean flagUniqueItemsEnabled;
private DataAddedTask(CentralRepository db, PropertyChangeEvent evt, boolean flagNotableItemsEnabled, boolean flagPreviousItemsEnabled, boolean createCorrelationAttributes, boolean flagUnique) {
this.dbManager = db;
this.event = evt;
this.flagNotableItemsEnabled = flagNotableItemsEnabled;
this.flagPreviousItemsEnabled = flagPreviousItemsEnabled;
this.createCorrelationAttributes = createCorrelationAttributes;
this.flagUniqueItemsEnabled = flagUnique;
}
@Override
public void run() {
if (!CentralRepository.isEnabled()) {
return;
}
final ModuleDataEvent mde = (ModuleDataEvent) event.getOldValue();
Collection<BlackboardArtifact> bbArtifacts = mde.getArtifacts();
if (null == bbArtifacts) { //the ModuleDataEvents don't always have a collection of artifacts set
return;
}
List<CorrelationAttributeInstance> eamArtifacts = new ArrayList<>();
for (BlackboardArtifact bbArtifact : bbArtifacts) {
// makeCorrAttrsToSave will filter out artifacts which should not be sources of CR data.
List<CorrelationAttributeInstance> convertedArtifacts = new ArrayList<>();
if (bbArtifact instanceof DataArtifact) {
convertedArtifacts.addAll(CorrelationAttributeUtil.makeCorrAttrsToSave((DataArtifact) bbArtifact));
}
for (CorrelationAttributeInstance eamArtifact : convertedArtifacts) {
try {
// Only do something with this artifact if it's unique within the job
if (recentlyAddedCeArtifacts.add(eamArtifact.toString())) {
// Get a list of instances for a given value (hash, email, etc.)
List<CorrelationAttributeInstance> previousOccurrences = new ArrayList<>();
// check if we are flagging things
if (flagNotableItemsEnabled || flagPreviousItemsEnabled || flagUniqueItemsEnabled) {
try {
previousOccurrences = dbManager.getArtifactInstancesByTypeValue(eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue());
// make sure the previous instances do not contain current case
for (Iterator<CorrelationAttributeInstance> iterator = previousOccurrences.iterator(); iterator.hasNext();) {
CorrelationAttributeInstance instance = iterator.next();
if (instance.getCorrelationCase().getCaseUUID().equals(eamArtifact.getCorrelationCase().getCaseUUID())) {
// this is the current case - remove the instance from the previousOccurrences list
iterator.remove();
}
}
} catch (CorrelationAttributeNormalizationException ex) {
LOGGER.log(Level.INFO, String.format("Unable to flag previously seen device: %s.", eamArtifact.toString()), ex);
}
}
// Was it previously marked as bad?
// query db for artifact instances having this TYPE/VALUE and knownStatus = "Bad".
// if getKnownStatus() is "Unknown" and this artifact instance was marked bad in a previous case,
// create TSK_PREVIOUSLY_SEEN artifact on BB.
if (flagNotableItemsEnabled) {
List<String> caseDisplayNames = getCaseDisplayNamesForNotable(previousOccurrences);
if (!caseDisplayNames.isEmpty()) {
makeAndPostPreviousNotableArtifact(bbArtifact,
caseDisplayNames, eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue());
// if we have marked this artifact as notable, then skip the analysis of whether it was previously seen
continue;
}
}
// flag previously seen devices and communication accounts (emails, phones, etc.)
if (flagPreviousItemsEnabled && !previousOccurrences.isEmpty()
&& (eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.USBID_TYPE_ID
|| eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.ICCID_TYPE_ID
|| eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.IMEI_TYPE_ID
|| eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.IMSI_TYPE_ID
|| eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.MAC_TYPE_ID
|| eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.EMAIL_TYPE_ID
|| eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.PHONE_TYPE_ID)) {
List<String> caseDisplayNames = getCaseDisplayNames(previousOccurrences);
makeAndPostPreviousSeenArtifact(bbArtifact, caseDisplayNames, eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue());
}
// flag previously unseen apps and domains
if (flagUniqueItemsEnabled
&& (eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.INSTALLED_PROGS_TYPE_ID
|| eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.DOMAIN_TYPE_ID)) {
if (previousOccurrences.isEmpty()) {
makeAndPostPreviouslyUnseenArtifact(bbArtifact, eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue());
}
}
if (createCorrelationAttributes) {
eamArtifacts.add(eamArtifact);
}
}
} catch (CentralRepoException ex) {
LOGGER.log(Level.SEVERE, "Error counting notable artifacts.", ex);
}
}
}
if (!eamArtifacts.isEmpty()) {
for (CorrelationAttributeInstance eamArtifact : eamArtifacts) {
try {
dbManager.addArtifactInstance(eamArtifact);
} catch (CentralRepoException ex) {
LOGGER.log(Level.SEVERE, "Error adding artifact to database.", ex); //NON-NLS
}
}
} // DATA_ADDED
}
}
/**
* Gets case display names for a list of CorrelationAttributeInstance.
*
* @param occurrences List of CorrelationAttributeInstance
*
* @return List of case display names
*/
private List<String> getCaseDisplayNames(List<CorrelationAttributeInstance> occurrences) {
List<String> caseNames = new ArrayList<>();
for (CorrelationAttributeInstance occurrence : occurrences) {
caseNames.add(occurrence.getCorrelationCase().getDisplayName());
}
return caseNames;
}
/**
* Gets case display names for only occurrences marked as NOTABLE/BAD.
*
* @param occurrences List of CorrelationAttributeInstance
*
* @return List of case display names of NOTABLE/BAD occurrences
*/
private List<String> getCaseDisplayNamesForNotable(List<CorrelationAttributeInstance> occurrences) {
List<String> caseNames = new ArrayList<>();
for (CorrelationAttributeInstance occurrence : occurrences) {
if (occurrence.getKnownStatus() == TskData.FileKnown.BAD) {
caseNames.add(occurrence.getCorrelationCase().getDisplayName());
}
}
return caseNames;
}
}
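For context, the two PropertyChangeListener implementations above are attached to Autopsy's ingest event streams. The following is a minimal sketch of the wiring, not part of this diff; it relies on the IngestManager add-listener methods and uses a hypothetical installCentralRepoListeners() helper name:

void installCentralRepoListeners() {
    IngestManager manager = IngestManager.getInstance();
    // Delivers DATA_ADDED module events to IngestModuleEventListener above.
    manager.addIngestModuleEventListener(new IngestModuleEventListener());
    // Delivers DATA_SOURCE_ANALYSIS_COMPLETED job events to IngestJobEventListener above.
    manager.addIngestJobEventListener(new IngestJobEventListener());
}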

View File

@@ -1,12 +1,26 @@
CentralRepoIngestModel_name_header=Name:<br>
CentralRepoIngestModel_previous_case_header=<br>Previous Cases:<br>
CentralRepoIngestModule.errorMessage.isNotEnabled=Central repository settings are not initialized, cannot run Central Repository ingest module.
CentralRepoIngestModule.notfyBubble.title=Central Repository Not Initialized
CentralRepoIngestModule.prevCaseComment.text=Previous Case:
CentralRepoIngestModule.prevTaggedSet.text=Previously Tagged As Notable (Central Repository)
CentralRepoIngestModule_notable_message_header=<html>A file in this data source was previously seen and tagged as Notable.<br>
# {0} - Name of file that is Notable
CentralRepoIngestModule_postToBB_knownBadMsg=Notable: {0}
CentralRepoIngestModule_artifact_type_inbox_msg_header=Artifact Type
CentralRepoIngestModule_cannotGetCrCaseErrMsg=Case not present in the central repository
CentralRepoIngestModule_cannotGetCrDataSourceErrMsg=Data source not present in the central repository
CentralRepoIngestModule_crDatabaseTypeMismatch=Multi-user cases require a PostgreSQL central repository
CentralRepoIngestModule_crInaccessibleErrMsg=Error accessing central repository
CentralRepoIngestModule_crNotEnabledErrMsg=Central repository required, but not enabled
CentralRepoIngestModule_filename_inbox_msg_header=File Name
CentralRepoIngestModule_md5Hash_inbox_msg_header=MD5 Hash
CentralRepoIngestModule_missingFileCorrAttrTypeErrMsg=Correlation attribute type for files not found in the central repository
CentralRepoIngestModule_noCurrentCaseErrMsg=Error getting current case
CentralRepoIngestModule_notable_attr_inbox_msg_header=Notable Attribute
# {0} - Name of item that is Notable
CentralRepoIngestModule_notable_inbox_msg_subject=Notable: {0}
# {0} - list of cases
CentralRepoIngestModule_notableJustification=Previously marked as notable in cases {0}
CentralRepoIngestModule_notableSetName=Previously Tagged As Notable (Central Repository)
CentralRepoIngestModule_prev_cases_inbox_msg_header=Previous Cases
# {0} - list of cases
CentralRepoIngestModule_prevSeenJustification=Previously seen in cases {0}
CentralRepoIngestModule_prevSeenOsAcctConfig=Previously Seen Users (Central Repository)
CentralRepoIngestModule_prevSeenOsAcctSetName=Users seen in previous cases
CentralRepoIngestModule_prevSeenSetName=Previously Seen (Central Repository)
CentralRepoIngestModule_prevUnseenJustification=Previously seen in zero cases
CentralRepoIngestModuleFactory.ingestmodule.desc=Saves properties to the central repository for later correlation
CentralRepoIngestModuleFactory.ingestmodule.name=Central Repository
IngestSettingsPanel.ingestSettingsLabel.text=Ingest Settings
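The parameterized keys above (those preceded by a "# {0} - ..." comment) are consumed through the NetBeans-generated Bundle class rather than read directly from this file. A minimal usage sketch, assuming the standard NbBundle code generation used elsewhere in this commit:

// The generated accessor substitutes the {0} placeholder.
String justification = Bundle.CentralRepoIngestModule_notableJustification("Case A, Case B");
// justification == "Previously marked as notable in cases Case A, Case B"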

View File

@@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Copyright 2021-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,37 +18,328 @@
*/
package org.sleuthkit.autopsy.centralrepository.ingestmodule;
import java.util.concurrent.atomic.AtomicLong;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import org.apache.commons.lang3.StringUtils;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoDbManager;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoPlatforms;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeUtil;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource;
import static org.sleuthkit.autopsy.centralrepository.ingestmodule.CentralRepoIngestModuleUtils.getOccurrencesInOtherCases;
import static org.sleuthkit.autopsy.centralrepository.ingestmodule.CentralRepoIngestModuleUtils.makePrevNotableAnalysisResult;
import static org.sleuthkit.autopsy.centralrepository.ingestmodule.CentralRepoIngestModuleUtils.makePrevSeenAnalysisResult;
import static org.sleuthkit.autopsy.centralrepository.ingestmodule.CentralRepoIngestModuleUtils.makePrevUnseenAnalysisResult;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.DataArtifactIngestModule;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.datamodel.DataArtifact;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.OsAccount;
import org.sleuthkit.datamodel.OsAccountManager;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
/**
* A data artifact ingest module that adds correlation attributes for data
* artifacts and OS accounts to the central repository and makes analysis
* results based on previous occurrences. When the ingest job is completed,
* ensures the data source in the central repository has hash values that match
* those in the case database.
*/
public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestModule {
private final AtomicLong artifactCounter = new AtomicLong();
private static final Logger LOGGER = Logger.getLogger(CentralRepoDataArtifactIngestModule.class.getName());
private final boolean flagNotableItems;
private final boolean flagPrevSeenDevices;
private final boolean flagUniqueArtifacts;
private final boolean saveCorrAttrInstances;
private final Set<String> corrAttrValuesAlreadyProcessed;
private CentralRepository centralRepo;
private IngestJobContext context;
/**
* Constructs a data artifact ingest module that adds correlation attributes
* for data artifacts and OS accounts to the central repository and makes
* analysis results based on previous occurrences. When the ingest job is
* completed, ensures the data source in the central repository has hash
* values that match those in the case database.
*
* @param settings The ingest job settings for this module.
*/
CentralRepoDataArtifactIngestModule(IngestSettings settings) {
flagNotableItems = settings.isFlagTaggedNotableItems();
flagPrevSeenDevices = settings.isFlagPreviousDevices();
flagUniqueArtifacts = settings.isFlagUniqueArtifacts();
saveCorrAttrInstances = settings.shouldCreateCorrelationProperties();
corrAttrValuesAlreadyProcessed = new LinkedHashSet<>();
}
@NbBundle.Messages({
"CentralRepoIngestModule_crNotEnabledErrMsg=Central repository required, but not enabled",
"CentralRepoIngestModule_crInaccessibleErrMsg=Error accessing central repository",
"CentralRepoIngestModule_noCurrentCaseErrMsg=Error getting current case",
"CentralRepoIngestModule_crDatabaseTypeMismatch=Mulit-user cases require a PostgreSQL central repository"
})
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
this.context = context;
if (!CentralRepository.isEnabled()) {
throw new IngestModuleException(Bundle.CentralRepoIngestModule_crNotEnabledErrMsg()); // May be displayed to user.
}
try {
centralRepo = CentralRepository.getInstance();
} catch (CentralRepoException ex) {
throw new IngestModuleException(Bundle.CentralRepoIngestModule_crInaccessibleErrMsg(), ex);
}
/*
* Don't allow a SQLite central repository to be used for a multi-user
* case.
*/
try {
Case currentCase = Case.getCurrentCaseThrows();
if ((currentCase.getCaseType() == Case.CaseType.MULTI_USER_CASE) && (CentralRepoDbManager.getSavedDbChoice().getDbPlatform() == CentralRepoPlatforms.SQLITE)) {
throw new IngestModuleException(Bundle.CentralRepoIngestModule_crDatabaseTypeMismatch());
}
} catch (NoCurrentCaseException ex) {
throw new IngestModuleException(Bundle.CentralRepoIngestModule_noCurrentCaseErrMsg(), ex);
}
}
/**
* Translates the attributes of a data artifact into central repository
* correlation attributes and uses them to create analysis results and new
* central repository correlation attribute instances, depending on ingest
* job settings.
*
* @param artifact The data artifact.
*
* @return An ingest module process result.
*/
@Override
public ProcessResult process(DataArtifact artifact) {
artifactCounter.incrementAndGet();
if (flagNotableItems || flagPrevSeenDevices || flagUniqueArtifacts || saveCorrAttrInstances) {
for (CorrelationAttributeInstance corrAttr : CorrelationAttributeUtil.makeCorrAttrsToSave(artifact)) {
if (corrAttrValuesAlreadyProcessed.add(corrAttr.toString())) {
makeAnalysisResults(artifact, corrAttr);
if (saveCorrAttrInstances) {
try {
centralRepo.addAttributeInstanceBulk(corrAttr);
} catch (CentralRepoException ex) {
LOGGER.log(Level.SEVERE, String.format("Error adding correlation attribute '%s' to central repository for '%s' (job ID=%d)", corrAttr, artifact, context.getJobId()), ex); //NON-NLS
}
}
}
}
}
return ProcessResult.OK;
}
/**
* Makes analysis results for a data artifact based on previous occurrences,
* if any, of a correlation attribute.
*
* @param artifact The data artifact.
* @param corrAttr A correlation attribute for the data artifact.
*/
private void makeAnalysisResults(DataArtifact artifact, CorrelationAttributeInstance corrAttr) {
List<CorrelationAttributeInstance> previousOccurrences = null;
if (flagNotableItems) {
previousOccurrences = getOccurrencesInOtherCases(corrAttr, context.getJobId());
if (!previousOccurrences.isEmpty()) {
Set<String> previousCases = new HashSet<>();
for (CorrelationAttributeInstance occurrence : previousOccurrences) {
if (occurrence.getKnownStatus() == TskData.FileKnown.BAD) {
previousCases.add(occurrence.getCorrelationCase().getDisplayName());
}
}
if (!previousCases.isEmpty()) {
makePrevNotableAnalysisResult(artifact, previousCases, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue(), context.getDataSource().getId(), context.getJobId());
}
}
}
if (flagPrevSeenDevices
&& (corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.USBID_TYPE_ID
|| corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.ICCID_TYPE_ID
|| corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.IMEI_TYPE_ID
|| corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.IMSI_TYPE_ID
|| corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.MAC_TYPE_ID
|| corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.EMAIL_TYPE_ID
|| corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.PHONE_TYPE_ID)) {
if (previousOccurrences == null) {
previousOccurrences = getOccurrencesInOtherCases(corrAttr, context.getJobId());
}
if (!previousOccurrences.isEmpty()) {
Set<String> previousCases = getPreviousCases(previousOccurrences);
if (!previousCases.isEmpty()) {
makePrevSeenAnalysisResult(artifact, previousCases, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue(), context.getDataSource().getId(), context.getJobId());
}
}
}
if (flagUniqueArtifacts
&& (corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.INSTALLED_PROGS_TYPE_ID
|| corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.DOMAIN_TYPE_ID)) {
if (previousOccurrences == null) {
previousOccurrences = getOccurrencesInOtherCases(corrAttr, context.getJobId());
}
if (previousOccurrences.isEmpty()) {
makePrevUnseenAnalysisResult(artifact, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue(), context.getDataSource().getId(), context.getJobId());
}
}
}
/**
* Gets a unique set of previous cases, represented by their names, from a
* list of previous occurrences of correlation attributes.
*
* @param previousOccurrences The correlation attributes.
*
* @return The names of the previous cases.
*/
private Set<String> getPreviousCases(List<CorrelationAttributeInstance> previousOccurrences) {
Set<String> previousCases = new HashSet<>();
for (CorrelationAttributeInstance occurrence : previousOccurrences) {
previousCases.add(occurrence.getCorrelationCase().getDisplayName());
}
return previousCases;
}
@Override
public void shutDown() {
IngestServices.getInstance().postMessage(IngestMessage.createMessage(
IngestMessage.MessageType.INFO,
CentralRepoIngestModuleFactory.getModuleName(),
String.format("%d data artifacts processed", artifactCounter.get()))); //NON-NLS
analyzeOsAccounts();
if (saveCorrAttrInstances) {
try {
centralRepo.commitAttributeInstancesBulk();
} catch (CentralRepoException ex) {
LOGGER.log(Level.SEVERE, String.format("Error doing final bulk commit of correlation attributes (job ID=%d)", context.getJobId()), ex); // NON-NLS
}
}
syncDataSourceHashes();
}
/**
* Queries the case database for any OS accounts associated with the data
* source for the ingest job. The attributes of any OS account returned by
* the query are translated into central repository correlation attributes
* and used to create analysis results and new central repository
* correlation attribute instances, depending on ingest job settings.
*/
@NbBundle.Messages({
"CentralRepoIngestModule_prevSeenOsAcctSetName=Users seen in previous cases",
"CentralRepoIngestModule_prevSeenOsAcctConfig=Previously Seen Users (Central Repository)"
})
private void analyzeOsAccounts() {
if (saveCorrAttrInstances || flagPrevSeenDevices) {
try {
OsAccountManager osAccountMgr = Case.getCurrentCaseThrows().getSleuthkitCase().getOsAccountManager();
List<OsAccount> osAccounts = osAccountMgr.getOsAccountsByDataSourceObjId(context.getDataSource().getId());
for (OsAccount osAccount : osAccounts) {
for (CorrelationAttributeInstance corrAttr : CorrelationAttributeUtil.makeCorrAttrsToSave(osAccount, context.getDataSource())) {
if (flagPrevSeenDevices) {
makeAnalysisResults(osAccount, corrAttr);
}
if (saveCorrAttrInstances) {
try {
centralRepo.addAttributeInstanceBulk(corrAttr);
} catch (CentralRepoException ex) {
LOGGER.log(Level.SEVERE, String.format("Error adding correlation attribute '%s' to central repository for '%s'(job ID=%d)", corrAttr, osAccount, context.getJobId()), ex); //NON-NLS
}
}
}
}
} catch (NoCurrentCaseException | TskCoreException ex) {
LOGGER.log(Level.SEVERE, String.format("Error getting OS accounts for data source '%s' (job ID=%d)", context.getDataSource(), context.getJobId()), ex);
}
}
}
/**
* Makes analysis results for an OS Account based on previous occurrences,
* if any, of a correlation attribute.
*
* @param osAccount The OS account.
* @param corrAttr A correlation attribute for the OS account.
*/
private void makeAnalysisResults(OsAccount osAccount, CorrelationAttributeInstance corrAttr) {
if (flagPrevSeenDevices) {
List<CorrelationAttributeInstance> previousOccurrences = getOccurrencesInOtherCases(corrAttr, context.getJobId());
if (!previousOccurrences.isEmpty()) {
Set<String> previousCases = getPreviousCases(previousOccurrences);
if (!previousCases.isEmpty()) {
makePrevSeenAnalysisResult(osAccount, previousCases, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue(), context.getDataSource().getId(), context.getJobId());
}
}
}
}
/**
* Ensures the data source in the central repository has hash values that
* match those in the case database.
*/
private void syncDataSourceHashes() {
if (!(context.getDataSource() instanceof Image)) {
return;
}
try {
Case currentCase = Case.getCurrentCaseThrows();
CorrelationCase correlationCase = centralRepo.getCase(currentCase);
if (correlationCase == null) {
correlationCase = centralRepo.newCase(currentCase);
}
CorrelationDataSource correlationDataSource = centralRepo.getDataSource(correlationCase, context.getDataSource().getId());
if (correlationDataSource == null) {
correlationDataSource = CorrelationDataSource.fromTSKDataSource(correlationCase, context.getDataSource());
}
Image image = (Image) context.getDataSource();
String imageMd5Hash = image.getMd5();
if (imageMd5Hash == null) {
imageMd5Hash = "";
}
String crMd5Hash = correlationDataSource.getMd5();
if (StringUtils.equals(imageMd5Hash, crMd5Hash) == false) {
correlationDataSource.setMd5(imageMd5Hash);
}
String imageSha1Hash = image.getSha1();
if (imageSha1Hash == null) {
imageSha1Hash = "";
}
String crSha1Hash = correlationDataSource.getSha1();
if (StringUtils.equals(imageSha1Hash, crSha1Hash) == false) {
correlationDataSource.setSha1(imageSha1Hash);
}
String imageSha256Hash = image.getSha256();
if (imageSha256Hash == null) {
imageSha256Hash = "";
}
String crSha256Hash = correlationDataSource.getSha256();
if (StringUtils.equals(imageSha256Hash, crSha256Hash) == false) {
correlationDataSource.setSha256(imageSha256Hash);
}
} catch (CentralRepoException ex) {
LOGGER.log(Level.SEVERE, String.format("Error fetching data from the central repository for data source '%s' (job ID=%d)", context.getDataSource().getName(), context.getJobId()), ex);
} catch (NoCurrentCaseException | TskCoreException ex) {
LOGGER.log(Level.SEVERE, String.format("Error fetching data from the case database for data source '%s' (job ID=%d)", context.getDataSource().getName(), context.getJobId()), ex);
}
}
}
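The three hash comparisons in syncDataSourceHashes() above (and the matching block in AnalysisCompleteTask earlier in this commit) repeat one null-guard/compare/set pattern. A possible refactoring sketch, not part of this diff; the HashSetter interface is hypothetical, and it assumes the CorrelationDataSource setters throw CentralRepoException, as the surrounding try/catch suggests:

@FunctionalInterface
private interface HashSetter {
    void set(String hash) throws CentralRepoException;
}

private static void syncHash(String caseDbHash, String crHash, HashSetter setter) throws CentralRepoException {
    // The case database value wins; a null hash is stored as an empty string.
    String normalized = (caseDbHash == null) ? "" : caseDbHash;
    if (!StringUtils.equals(normalized, crHash)) {
        setter.set(normalized);
    }
}

Usage would then read, e.g.: syncHash(image.getMd5(), correlationDataSource.getMd5(), correlationDataSource::setMd5);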

View File

@@ -1,7 +1,7 @@
/*
* Central Repository
*
* Copyright 2011-2021 Basis Technology Corp.
* Copyright 2018-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,11 +18,10 @@
*/
package org.sleuthkit.autopsy.centralrepository.ingestmodule;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.stream.Collectors;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
@@ -32,95 +31,52 @@ import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeUtil;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoPlatforms;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoDbManager;
import org.sleuthkit.autopsy.centralrepository.eventlisteners.IngestEventsListener;
import org.sleuthkit.autopsy.core.RuntimeProperties;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.healthmonitor.HealthMonitor;
import org.sleuthkit.autopsy.healthmonitor.TimingMetric;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_PREVIOUSLY_NOTABLE;
import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_TYPE;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_VALUE;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_OTHER_CASES;
import org.sleuthkit.datamodel.HashUtility;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository;
import org.sleuthkit.datamodel.Score;
import static org.sleuthkit.autopsy.centralrepository.ingestmodule.CentralRepoIngestModuleUtils.makePrevNotableAnalysisResult;
/**
* Ingest module for inserting entries into the Central Repository database on
* ingest of a data source
* A file ingest module that adds correlation attributes for files to the
* central repository, and makes previously notable analysis results for files
* marked as notable in other cases.
*/
@Messages({"CentralRepoIngestModule.prevTaggedSet.text=Previously Tagged As Notable (Central Repository)",
"CentralRepoIngestModule.prevCaseComment.text=Previous Case: "})
final class CentralRepoIngestModule implements FileIngestModule {
private static final String MODULE_NAME = CentralRepoIngestModuleFactory.getModuleName();
static final boolean DEFAULT_FLAG_TAGGED_NOTABLE_ITEMS = false;
static final boolean DEFAULT_FLAG_PREVIOUS_DEVICES = false;
static final boolean DEFAULT_FLAG_UNIQUE_DEVICES = false;
static final boolean DEFAULT_CREATE_CR_PROPERTIES = true;
private final static Logger logger = Logger.getLogger(CentralRepoIngestModule.class.getName());
private final IngestServices services = IngestServices.getInstance();
private static final Logger logger = Logger.getLogger(CentralRepoIngestModule.class.getName());
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
private static final IngestModuleReferenceCounter warningMsgRefCounter = new IngestModuleReferenceCounter();
private long jobId;
private CorrelationCase eamCase;
private CorrelationDataSource eamDataSource;
private final boolean flagNotableItems;
private final boolean saveCorrAttrInstances;
private CorrelationAttributeInstance.Type filesType;
private final boolean flagTaggedNotableItems;
private final boolean flagPreviouslySeenDevices;
private Blackboard blackboard;
private final boolean createCorrelationProperties;
private final boolean flagUniqueArtifacts;
private IngestJobContext context;
private CentralRepository centralRepo;
/**
* Instantiate the Central Repository ingest module.
* Constructs a file ingest module that adds correlation attributes for
* files to the central repository, and makes previously notable analysis
* results for files marked as notable in other cases.
*
* @param settings The ingest settings for the module instance.
* @param settings The ingest job settings.
*/
CentralRepoIngestModule(IngestSettings settings) {
flagTaggedNotableItems = settings.isFlagTaggedNotableItems();
flagPreviouslySeenDevices = settings.isFlagPreviousDevices();
createCorrelationProperties = settings.shouldCreateCorrelationProperties();
flagUniqueArtifacts = settings.isFlagUniqueArtifacts();
}
flagNotableItems = settings.isFlagTaggedNotableItems();
saveCorrAttrInstances = settings.shouldCreateCorrelationProperties();
}
@Override
public ProcessResult process(AbstractFile abstractFile) {
if (CentralRepository.isEnabled() == false) {
/*
* Not signaling an error for now. This is a workaround for the way
* all newly discovered ingest modules are automatically enabled.
*
* TODO (JIRA-2731): Add isEnabled API for ingest modules.
*/
if (!flagNotableItems && !saveCorrAttrInstances) {
return ProcessResult.OK;
}
try {
blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Exception while getting open case.", ex);
return ProcessResult.ERROR;
}
if (!CorrelationAttributeUtil.isSupportedAbstractFileType(abstractFile)) {
if (!filesType.isEnabled()) {
return ProcessResult.OK;
}
@@ -128,290 +84,119 @@ final class CentralRepoIngestModule implements FileIngestModule {
return ProcessResult.OK;
}
CentralRepository dbManager;
try {
dbManager = CentralRepository.getInstance();
} catch (CentralRepoException ex) {
logger.log(Level.SEVERE, "Error connecting to Central Repository database.", ex);
return ProcessResult.ERROR;
}
// only continue if we are correlating filesType
if (!filesType.isEnabled()) {
if (!CorrelationAttributeUtil.isSupportedAbstractFileType(abstractFile)) {
return ProcessResult.OK;
}
// get the hash because we're going to correlate it
/*
* The correlation attribute value for a file is its MD5 hash. This
* module cannot do anything with a file if the hash calculation has not
* been done, but the decision has been made to not do a hash
* calculation here if the file hashing and lookup module is not in this
* pipeline ahead of this module (affirmed per BC, 11/8/21).
*/
String md5 = abstractFile.getMd5Hash();
if ((md5 == null) || (HashUtility.isNoDataMd5(md5))) {
return ProcessResult.OK;
}
/*
* Search the central repo to see if this file was previously marked as
* being bad. Create artifact if it was.
*/
if (abstractFile.getKnown() != TskData.FileKnown.KNOWN && flagTaggedNotableItems) {
if (flagNotableItems) {
try {
TimingMetric timingMetric = HealthMonitor.getTimingMetric("Central Repository: Notable artifact query");
List<String> caseDisplayNamesList = dbManager.getListCasesHavingArtifactInstancesKnownBad(filesType, md5);
Set<String> otherCases = new HashSet<>();
otherCases.addAll(centralRepo.getListCasesHavingArtifactInstancesKnownBad(filesType, md5));
HealthMonitor.submitTimingMetric(timingMetric);
if (!caseDisplayNamesList.isEmpty()) {
postCorrelatedBadFileToBlackboard(abstractFile, caseDisplayNamesList, filesType, md5);
if (!otherCases.isEmpty()) {
makePrevNotableAnalysisResult(abstractFile, otherCases, filesType, md5, context.getDataSource().getId(), context.getJobId());
}
} catch (CentralRepoException ex) {
logger.log(Level.SEVERE, "Error searching database for artifact.", ex); // NON-NLS
return ProcessResult.ERROR;
} catch (CorrelationAttributeNormalizationException ex) {
logger.log(Level.INFO, "Error searching database for artifact.", ex); // NON-NLS
return ProcessResult.ERROR;
}
}
// insert this file into the central repository
if (createCorrelationProperties) {
try {
CorrelationAttributeInstance cefi = new CorrelationAttributeInstance(
filesType,
md5,
eamCase,
eamDataSource,
abstractFile.getParentPath() + abstractFile.getName(),
null,
TskData.FileKnown.UNKNOWN // NOTE: Known status in the CR is based on tagging, not hashes like the Case Database.
,
abstractFile.getId());
dbManager.addAttributeInstanceBulk(cefi);
} catch (CentralRepoException ex) {
logger.log(Level.SEVERE, "Error adding artifact to bulk artifacts.", ex); // NON-NLS
return ProcessResult.ERROR;
} catch (CorrelationAttributeNormalizationException ex) {
logger.log(Level.INFO, "Error adding artifact to bulk artifacts.", ex); // NON-NLS
return ProcessResult.ERROR;
if (saveCorrAttrInstances) {
List<CorrelationAttributeInstance> corrAttrs = CorrelationAttributeUtil.makeCorrAttrsToSave(abstractFile);
for (CorrelationAttributeInstance corrAttr : corrAttrs) {
try {
centralRepo.addAttributeInstanceBulk(corrAttr);
} catch (CentralRepoException ex) {
logger.log(Level.SEVERE, "Error adding artifact to bulk artifacts.", ex); // NON-NLS
}
}
}
return ProcessResult.OK;
}
@Override
public void shutDown() {
IngestEventsListener.decrementCorrelationEngineModuleCount();
if ((CentralRepository.isEnabled() == false) || (eamCase == null) || (eamDataSource == null)) {
return;
if (refCounter.decrementAndGet(context.getJobId()) == 0) {
try {
centralRepo.commitAttributeInstancesBulk();
} catch (CentralRepoException ex) {
logger.log(Level.SEVERE, String.format("Error committing bulk insert of correlation attributes (job ID=%d)", context.getJobId()), ex); // NON-NLS
}
}
CentralRepository dbManager;
try {
dbManager = CentralRepository.getInstance();
} catch (CentralRepoException ex) {
logger.log(Level.SEVERE, "Error connecting to Central Repository database.", ex);
return;
}
try {
dbManager.commitAttributeInstancesBulk();
} catch (CentralRepoException ex) {
logger.log(Level.SEVERE, "Error doing bulk insert of artifacts.", ex); // NON-NLS
}
try {
Long count = dbManager.getCountArtifactInstancesByCaseDataSource(eamDataSource);
logger.log(Level.INFO, "{0} artifacts in db for case: {1} ds:{2}", new Object[]{count, eamCase.getDisplayName(), eamDataSource.getName()}); // NON-NLS
} catch (CentralRepoException ex) {
logger.log(Level.SEVERE, "Error counting artifacts.", ex); // NON-NLS
}
// TODO: once we implement shared cache, if refCounter is 1, then submit data in bulk.
refCounter.decrementAndGet(jobId);
}
// see ArtifactManagerTimeTester for details
}
@Messages({
"CentralRepoIngestModule.notfyBubble.title=Central Repository Not Initialized",
"CentralRepoIngestModule.errorMessage.isNotEnabled=Central repository settings are not initialized, cannot run Central Repository ingest module."
"CentralRepoIngestModule_missingFileCorrAttrTypeErrMsg=Correlation attribute type for files not found in the central repository",
"CentralRepoIngestModule_cannotGetCrCaseErrMsg=Case not present in the central repository",
"CentralRepoIngestModule_cannotGetCrDataSourceErrMsg=Data source not present in the central repository"
})
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
this.context = context;
IngestEventsListener.incrementCorrelationEngineModuleCount();
if (!CentralRepository.isEnabled()) {
throw new IngestModuleException(Bundle.CentralRepoIngestModule_crNotEnabledErrMsg());
}
try {
centralRepo = CentralRepository.getInstance();
} catch (CentralRepoException ex) {
throw new IngestModuleException(Bundle.CentralRepoIngestModule_crInaccessibleErrMsg(), ex);
}
/*
* Tell the IngestEventsListener to flag notable items based on the
* current module's configuration. This is a workaround for the lack of
* an artifacts pipeline. Note that this can be changed by another
* module instance. All modules are affected by the value. While not
* ideal, this will be good enough until a better solution can be
* posited.
*
* Note: Flagging cannot be disabled if any other instances of the
* Central Repository module are running. This restriction is to prevent
* missing results in the case where the first module is flagging
* notable items, and the proceeding module (with flagging disabled)
* causes the first to stop flagging.
* Make sure the correlation attribute type definition is in the central
* repository. Currently (11/8/21) it is cached, but there is no harm in
* saving it here for use in process().
*/
if (IngestEventsListener.getCeModuleInstanceCount() == 1 || !IngestEventsListener.isFlagNotableItems()) {
IngestEventsListener.setFlagNotableItems(flagTaggedNotableItems);
}
if (IngestEventsListener.getCeModuleInstanceCount() == 1 || !IngestEventsListener.isFlagSeenDevices()) {
IngestEventsListener.setFlagSeenDevices(flagPreviouslySeenDevices);
}
if (IngestEventsListener.getCeModuleInstanceCount() == 1 || !IngestEventsListener.shouldCreateCrProperties()) {
IngestEventsListener.setCreateCrProperties(createCorrelationProperties);
}
if (IngestEventsListener.getCeModuleInstanceCount() == 1 || !IngestEventsListener.isFlagUniqueArtifacts()) {
IngestEventsListener.setFlagUniqueArtifacts(flagUniqueArtifacts);
}
if (CentralRepository.isEnabled() == false) {
/*
* Not throwing the customary exception for now. This is a
* workaround for the way all newly discovered ingest modules are
* automatically enabled.
*
* TODO (JIRA-2731): Add isEnabled API for ingest modules.
*/
if (RuntimeProperties.runningWithGUI()) {
if (1L == warningMsgRefCounter.incrementAndGet(jobId)) {
MessageNotifyUtil.Notify.warn(Bundle.CentralRepoIngestModule_notfyBubble_title(), Bundle.CentralRepoIngestModule_errorMessage_isNotEnabled());
}
}
return;
}
Case autopsyCase;
try {
autopsyCase = Case.getCurrentCaseThrows();
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Exception while getting open case.", ex);
throw new IngestModuleException("Exception while getting open case.", ex);
}
// Don't allow sqlite central repo databases to be used for multi user cases
if ((autopsyCase.getCaseType() == Case.CaseType.MULTI_USER_CASE)
&& (CentralRepoDbManager.getSavedDbChoice().getDbPlatform() == CentralRepoPlatforms.SQLITE)) {
logger.log(Level.SEVERE, "Cannot run Central Repository ingest module on a multi-user case with a SQLite central repository.");
throw new IngestModuleException("Cannot run on a multi-user case with a SQLite central repository."); // NON-NLS
}
jobId = context.getJobId();
CentralRepository centralRepoDb;
try {
centralRepoDb = CentralRepository.getInstance();
filesType = centralRepo.getCorrelationTypeById(CorrelationAttributeInstance.FILES_TYPE_ID);
} catch (CentralRepoException ex) {
logger.log(Level.SEVERE, "Error connecting to central repository database.", ex); // NON-NLS
throw new IngestModuleException("Error connecting to central repository database.", ex); // NON-NLS
throw new IngestModuleException(Bundle.CentralRepoIngestModule_missingFileCorrAttrTypeErrMsg(), ex);
}
try {
filesType = centralRepoDb.getCorrelationTypeById(CorrelationAttributeInstance.FILES_TYPE_ID);
} catch (CentralRepoException ex) {
logger.log(Level.SEVERE, "Error getting correlation type FILES in ingest module start up.", ex); // NON-NLS
throw new IngestModuleException("Error getting correlation type FILES in ingest module start up.", ex); // NON-NLS
}
try {
eamCase = centralRepoDb.getCase(autopsyCase);
} catch (CentralRepoException ex) {
throw new IngestModuleException("Unable to get case from central repository database ", ex);
}
try {
eamDataSource = CorrelationDataSource.fromTSKDataSource(eamCase, context.getDataSource());
} catch (CentralRepoException ex) {
logger.log(Level.SEVERE, "Error getting data source info.", ex); // NON-NLS
throw new IngestModuleException("Error getting data source info.", ex); // NON-NLS
}
// TODO: once we implement a shared cache, load/init it here w/ synchronized and define reference counter
// if we are the first thread / module for this job, then make sure the case
// and image exist in the DB before we associate artifacts with it.
if (refCounter.incrementAndGet(jobId)
== 1) {
// ensure we have this data source in the EAM DB
/*
* The first module instance started for this job makes sure the current
* case and data source are in the central repository. Currently
* (11/8/21), these are cached upon creation / first retrieval.
*/
if (refCounter.incrementAndGet(context.getJobId()) == 1) {
Case currentCase;
try {
if (null == centralRepoDb.getDataSource(eamCase, eamDataSource.getDataSourceObjectID())) {
centralRepoDb.newDataSource(eamDataSource);
}
currentCase = Case.getCurrentCaseThrows();
} catch (NoCurrentCaseException ex) {
throw new IngestModuleException(Bundle.CentralRepoIngestModule_noCurrentCaseErrMsg(), ex);
}
CorrelationCase centralRepoCase;
try {
centralRepoCase = centralRepo.getCase(currentCase);
} catch (CentralRepoException ex) {
logger.log(Level.SEVERE, "Error adding data source to Central Repository.", ex); // NON-NLS
throw new IngestModuleException("Error adding data source to Central Repository.", ex); // NON-NLS
throw new IngestModuleException(Bundle.CentralRepoIngestModule_cannotGetCrCaseErrMsg(), ex);
}
}
}
/**
* Post a new "previously seen" artifact for the file marked bad.
*
* @param abstractFile The file from which to create an artifact.
* @param caseDisplayNames Case names to be added to a TSK_COMMON attribute.
*/
private void postCorrelatedBadFileToBlackboard(AbstractFile abstractFile, List<String> caseDisplayNames, CorrelationAttributeInstance.Type aType, String value) {
String prevCases = caseDisplayNames.stream().distinct().collect(Collectors.joining(","));
String justification = "Previously marked as notable in cases " + prevCases;
Collection<BlackboardAttribute> attributes = Arrays.asList(
new BlackboardAttribute(
TSK_SET_NAME, MODULE_NAME,
Bundle.CentralRepoIngestModule_prevTaggedSet_text()),
new BlackboardAttribute(
TSK_CORRELATION_TYPE, MODULE_NAME,
aType.getDisplayName()),
new BlackboardAttribute(
TSK_CORRELATION_VALUE, MODULE_NAME,
value),
new BlackboardAttribute(
TSK_OTHER_CASES, MODULE_NAME,
prevCases));
try {
// Create artifact if it doesn't already exist.
if (!blackboard.artifactExists(abstractFile, TSK_PREVIOUSLY_NOTABLE, attributes)) {
BlackboardArtifact tifArtifact = abstractFile.newAnalysisResult(
BlackboardArtifact.Type.TSK_PREVIOUSLY_NOTABLE, Score.SCORE_NOTABLE,
null, Bundle.CentralRepoIngestModule_prevTaggedSet_text(), justification, attributes)
.getAnalysisResult();
try {
// index the artifact for keyword search
blackboard.postArtifact(tifArtifact, MODULE_NAME, context.getJobId());
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS
}
// send inbox message
sendBadFileInboxMessage(tifArtifact, abstractFile.getName(), abstractFile.getMd5Hash(), caseDisplayNames);
try {
CorrelationDataSource.fromTSKDataSource(centralRepoCase, context.getDataSource());
} catch (CentralRepoException ex) {
throw new IngestModuleException(Bundle.CentralRepoIngestModule_cannotGetCrDataSourceErrMsg(), ex);
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Failed to create BlackboardArtifact.", ex); // NON-NLS
} catch (IllegalStateException ex) {
logger.log(Level.SEVERE, "Failed to create BlackboardAttribute.", ex); // NON-NLS
}
}
@Messages({
"CentralRepoIngestModule_notable_message_header=<html>A file in this data source was previously seen and tagged as Notable.<br>",
"CentralRepoIngestModel_name_header=Name:<br>",
"CentralRepoIngestModel_previous_case_header=<br>Previous Cases:<br>",
"# {0} - Name of file that is Notable",
"CentralRepoIngestModule_postToBB_knownBadMsg=Notable: {0}"
})
/**
* Post a message to the ingest inbox alerting the user that a bad file was
* found.
*
* @param artifact badFile Blackboard Artifact
* @param name badFile's name
* @param md5Hash badFile's md5 hash
* @param caseDisplayNames List of cases that the artifact appears in.
*/
private void sendBadFileInboxMessage(BlackboardArtifact artifact, String name, String md5Hash, List<String> caseDisplayNames) {
StringBuilder detailsSb = new StringBuilder(1024);
detailsSb.append(Bundle.CentralRepoIngestModule_notable_message_header()).append(Bundle.CentralRepoIngestModel_name_header());
detailsSb.append(name).append(Bundle.CentralRepoIngestModel_previous_case_header());
for (String str : caseDisplayNames) {
detailsSb.append(str).append("<br>");
}
detailsSb.append("</html>");
services.postMessage(IngestMessage.createDataMessage(CentralRepoIngestModuleFactory.getModuleName(),
Bundle.CentralRepoIngestModule_postToBB_knownBadMsg(name),
detailsSb.toString(),
name + md5Hash,
artifact));
}
}
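The startUp()/shutDown() pair above uses per-job reference counting so that shared work happens exactly once per ingest job, no matter how many module instances (one per ingest thread) are created. The shape of the pattern, as a minimal sketch based on the IngestModuleReferenceCounter calls in this file (helper names are illustrative):

private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();

void onStartUp(IngestJobContext context) {
    if (refCounter.incrementAndGet(context.getJobId()) == 1) {
        // First instance for this job: make sure the case and data source
        // are in the central repository before any correlation happens.
    }
}

void onShutDown(IngestJobContext context) {
    if (refCounter.decrementAndGet(context.getJobId()) == 0) {
        // Last instance for this job: commit the shared bulk insert.
    }
}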

View File

@@ -128,7 +128,7 @@ public class CentralRepoIngestModuleFactory extends IngestModuleFactoryAdapter {
@Override
public DataArtifactIngestModule createDataArtifactIngestModule(IngestModuleIngestJobSettings settings) {
return new CentralRepoDataArtifactIngestModule();
return new CentralRepoDataArtifactIngestModule((IngestSettings) settings);
}
}

View File

@@ -0,0 +1,336 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.centralrepository.ingestmodule;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.logging.Level;
import java.util.stream.Collectors;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.AnalysisResult;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_TYPE;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_VALUE;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_OTHER_CASES;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataArtifact;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.TskCoreException;
/**
* Utility methods shared by the central repository ingest modules.
*/
class CentralRepoIngestModuleUtils {
private static final Logger LOGGER = Logger.getLogger(CentralRepoIngestModuleUtils.class.getName());
private static final int MAX_PREV_CASES_FOR_NOTABLE_SCORE = 10;
private static final int MAX_PREV_CASES_FOR_PREV_SEEN = 20;
private final static String MODULE_NAME = CentralRepoIngestModuleFactory.getModuleName();
/**
* Gets any previous occurrences of a given correlation attribute in cases
* other than the current case.
*
* @param corrAttr    The correlation attribute.
* @param ingestJobId The ingest job ID.
*
* @return The other occurrences of the correlation attribute.
*/
static List<CorrelationAttributeInstance> getOccurrencesInOtherCases(CorrelationAttributeInstance corrAttr, long ingestJobId) {
List<CorrelationAttributeInstance> previousOccurrences = new ArrayList<>();
try {
CentralRepository centralRepo = CentralRepository.getInstance();
previousOccurrences = centralRepo.getArtifactInstancesByTypeValue(corrAttr.getCorrelationType(), corrAttr.getCorrelationValue());
for (Iterator<CorrelationAttributeInstance> iterator = previousOccurrences.iterator(); iterator.hasNext();) {
CorrelationAttributeInstance prevOccurrence = iterator.next();
if (prevOccurrence.getCorrelationCase().getCaseUUID().equals(corrAttr.getCorrelationCase().getCaseUUID())) {
iterator.remove();
}
}
} catch (CorrelationAttributeNormalizationException ex) {
LOGGER.log(Level.SEVERE, String.format("Error normalizing correlation attribute value for 's' (job ID=%d)", corrAttr, ingestJobId), ex); // NON-NLS
} catch (CentralRepoException ex) {
LOGGER.log(Level.SEVERE, String.format("Error getting previous occurences of correlation attribute 's' (job ID=%d)", corrAttr, ingestJobId), ex); // NON-NLS
}
return previousOccurrences;
}
/**
* Makes a previously notable analysis result for a content.
*
* @param content The content.
* @param previousCases The names of the cases in which the artifact was
* deemed notable.
* @param corrAttrType The type of the matched correlation attribute.
* @param corrAttrValue The value of the matched correlation attribute.
* @param dataSourceObjId The data source object ID.
* @param ingestJobId The ingest job ID.
*/
@NbBundle.Messages({
"CentralRepoIngestModule_notableSetName=Previously Tagged As Notable (Central Repository)",
"# {0} - list of cases",
"CentralRepoIngestModule_notableJustification=Previously marked as notable in cases {0}"
})
static void makePrevNotableAnalysisResult(Content content, Set<String> previousCases, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue, long dataSourceObjId, long ingestJobId) {
String prevCases = previousCases.stream().collect(Collectors.joining(","));
String justification = Bundle.CentralRepoIngestModule_notableJustification(prevCases);
Collection<BlackboardAttribute> attributes = Arrays.asList(
new BlackboardAttribute(TSK_SET_NAME, MODULE_NAME, Bundle.CentralRepoIngestModule_notableSetName()),
new BlackboardAttribute(TSK_CORRELATION_TYPE, MODULE_NAME, corrAttrType.getDisplayName()),
new BlackboardAttribute(TSK_CORRELATION_VALUE, MODULE_NAME, corrAttrValue),
new BlackboardAttribute(TSK_OTHER_CASES, MODULE_NAME, prevCases));
Optional<AnalysisResult> result = makeAndPostAnalysisResult(content, BlackboardArtifact.Type.TSK_PREVIOUSLY_NOTABLE, attributes, "", Score.SCORE_NOTABLE, justification, dataSourceObjId, ingestJobId);
if (result.isPresent()) {
postNotableMessage(content, previousCases, corrAttrValue, result.get());
}
}
/**
* Makes a previously seen analysis result for a content, unless the content
* is too common.
*
* @param content The content.
* @param previousCases The names of the cases in which the artifact was
* previously seen.
* @param corrAttrType The type of the matched correlation attribute.
* @param corrAttrValue The value of the matched correlation attribute.
* @param dataSourceObjId The data source object ID.
* @param ingestJobId The ingest job ID.
*/
@NbBundle.Messages({
"CentralRepoIngestModule_prevSeenSetName=Previously Seen (Central Repository)",
"# {0} - list of cases",
"CentralRepoIngestModule_prevSeenJustification=Previously seen in cases {0}"
})
static void makePrevSeenAnalysisResult(Content content, Set<String> previousCases, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue, long dataSourceObjId, long ingestJobId) {
Optional<Score> score = calculateScore(previousCases.size());
if (score.isPresent()) {
String prevCases = previousCases.stream().collect(Collectors.joining(","));
String justification = Bundle.CentralRepoIngestModule_prevSeenJustification(prevCases);
Collection<BlackboardAttribute> analysisResultAttributes = Arrays.asList(
new BlackboardAttribute(TSK_SET_NAME, MODULE_NAME, Bundle.CentralRepoIngestModule_prevSeenSetName()),
new BlackboardAttribute(TSK_CORRELATION_TYPE, MODULE_NAME, corrAttrType.getDisplayName()),
new BlackboardAttribute(TSK_CORRELATION_VALUE, MODULE_NAME, corrAttrValue),
new BlackboardAttribute(TSK_OTHER_CASES, MODULE_NAME, prevCases));
makeAndPostAnalysisResult(content, BlackboardArtifact.Type.TSK_PREVIOUSLY_SEEN, analysisResultAttributes, "", score.get(), justification, dataSourceObjId, ingestJobId);
}
}
/**
* Makes a previously unseen analysis result for a content.
*
* @param content The content.
* @param corrAttrType The type of the new correlation attribute.
* @param corrAttrValue The value of the new correlation attribute.
* @param dataSourceObjId The data source object ID.
* @param ingestJobId The ingest job ID.
*/
@NbBundle.Messages({
"CentralRepoIngestModule_prevUnseenJustification=Previously seen in zero cases"
})
static void makePrevUnseenAnalysisResult(Content content, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue, long dataSourceObjId, long ingestJobId) {
Collection<BlackboardAttribute> attributesForNewArtifact = Arrays.asList(
new BlackboardAttribute(TSK_CORRELATION_TYPE, MODULE_NAME, corrAttrType.getDisplayName()),
new BlackboardAttribute(TSK_CORRELATION_VALUE, MODULE_NAME, corrAttrValue));
makeAndPostAnalysisResult(content, BlackboardArtifact.Type.TSK_PREVIOUSLY_UNSEEN, attributesForNewArtifact, "", Score.SCORE_LIKELY_NOTABLE, Bundle.CentralRepoIngestModule_prevUnseenJustification(), dataSourceObjId, ingestJobId);
}
/**
* Calculates a score based on the number of previous cases.
*
* @param numPreviousCases The number of previous cases.
*
* @return An Optional score; empty if there is no score because the number
* of previous cases is too high, indicating a common and therefore
* uninteresting item.
*/
static Optional<Score> calculateScore(int numPreviousCases) {
Score score = null;
if (numPreviousCases <= MAX_PREV_CASES_FOR_NOTABLE_SCORE) {
score = Score.SCORE_LIKELY_NOTABLE;
} else if (numPreviousCases > MAX_PREV_CASES_FOR_NOTABLE_SCORE && numPreviousCases <= MAX_PREV_CASES_FOR_PREV_SEEN) {
score = Score.SCORE_NONE;
}
return Optional.ofNullable(score);
}
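// Worked examples, using the constants defined above
// (MAX_PREV_CASES_FOR_NOTABLE_SCORE = 10, MAX_PREV_CASES_FOR_PREV_SEEN = 20):
//   calculateScore(5)  -> Optional.of(Score.SCORE_LIKELY_NOTABLE)  (at most 10 previous cases)
//   calculateScore(15) -> Optional.of(Score.SCORE_NONE)            (11 to 20 previous cases)
//   calculateScore(25) -> Optional.empty()                         (too common; no result is made)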
/**
* Makes a new analysis result of a given type for a content and posts it to
* the blackboard.
*
* @param content The content.
* @param analysisResultType The type of analysis result to make.
* @param analysisResultAttrs The attributes of the new analysis result.
* @param configuration The configuration for the new analysis result.
* @param score The score for the new analysis result.
* @param justification The justification for the new analysis result.
* @param dataSourceObjId The data source object ID.
* @param ingestJobId The ingest job ID.
*
* @return An Optional of the analysis result; empty if the result already
* existed or an error prevented its creation.
*/
private static Optional<AnalysisResult> makeAndPostAnalysisResult(Content content, BlackboardArtifact.Type analysisResultType, Collection<BlackboardAttribute> analysisResultAttrs, String configuration, Score score, String justification, long dataSourceObjId, long ingestJobId) {
AnalysisResult analysisResult = null;
try {
Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
if (!blackboard.artifactExists(content, analysisResultType, analysisResultAttrs)) {
analysisResult = content.newAnalysisResult(analysisResultType, score, null, configuration, justification, analysisResultAttrs, dataSourceObjId).getAnalysisResult();
try {
blackboard.postArtifact(analysisResult, MODULE_NAME, ingestJobId);
} catch (Blackboard.BlackboardException ex) {
LOGGER.log(Level.SEVERE, String.format("Error posting analysis result '%s' to blackboard for content '%s' (job ID=%d)", analysisResult, content, ingestJobId), ex); //NON-NLS
}
}
} catch (NoCurrentCaseException | TskCoreException ex) {
LOGGER.log(Level.SEVERE, String.format("Error creating %s analysis result for content '%s' (job ID=%d)", analysisResultType, content, ingestJobId), ex); // NON-NLS
}
return Optional.ofNullable(analysisResult);
}
/**
* Posts a message to the ingest messages inbox to notify the user that
* notable content has been found, i.e., a previously notable analysis
* result has been created.
*
* @param content The notable content.
* @param otherCases The other cases in which the content was marked as
* notable.
* @param corrAttrValue The correlation attribute value used to identify
* the content, used by the ingest inbox as a unique
* key for message grouping.
* @param analysisResult The previously notable analysis result.
*/
@NbBundle.Messages({
"# {0} - Name of item that is Notable",
"CentralRepoIngestModule_notable_inbox_msg_subject=Notable: {0}"
})
private static void postNotableMessage(Content content, Set<String> otherCases, String corrAttrValue, AnalysisResult analysisResult) {
String msgSubject = null;
String msgDetails = null;
String msgKey = corrAttrValue;
if (content instanceof AbstractFile) {
AbstractFile file = (AbstractFile) content;
msgSubject = Bundle.CentralRepoIngestModule_notable_inbox_msg_subject(file.getName());
msgDetails = makeNotableFileMessage(file, otherCases);
} else if (content instanceof DataArtifact) {
DataArtifact artifact = (DataArtifact) content;
msgSubject = Bundle.CentralRepoIngestModule_notable_inbox_msg_subject(artifact.getDisplayName());
msgDetails = makeNotableDataArtifactMessage(artifact, corrAttrValue, otherCases);
} else {
LOGGER.log(Level.SEVERE, "Unsupported Content, cannot post ingest inbox message");
}
if (msgSubject != null && msgDetails != null) {
IngestServices.getInstance().postMessage(
IngestMessage.createDataMessage(
MODULE_NAME,
msgSubject,
msgDetails,
msgKey,
analysisResult));
}
}
/**
* Makes an ingest inbox message for a notable file. Uses similar HTML
* markup as is used for this purpose by the hash lookup ingest module.
*
* @param file The notable file.
* @param otherCases The cases other than the current case in which the file
* was marked as notable.
*
* @return The message.
*/
@NbBundle.Messages({
"CentralRepoIngestModule_filename_inbox_msg_header=File Name",
"CentralRepoIngestModule_md5Hash_inbox_msg_header=MD5 Hash",
"CentralRepoIngestModule_prev_cases_inbox_msg_header=Previous Cases"
})
private static String makeNotableFileMessage(AbstractFile file, Set<String> otherCases) {
StringBuilder message = new StringBuilder(1024);
message.append("<table border='0' cellpadding='4' width='280'>"); //NON-NLS
addTableRowMarkup(message, Bundle.CentralRepoIngestModule_filename_inbox_msg_header(), file.getName());
addTableRowMarkup(message, Bundle.CentralRepoIngestModule_md5Hash_inbox_msg_header(), file.getMd5Hash());
addTableRowMarkup(message, Bundle.CentralRepoIngestModule_prev_cases_inbox_msg_header(), otherCases.stream().collect(Collectors.joining(",")));
message.append("</table>"); //NON-NLS
return message.toString();
}
/**
* Makes an ingest inbox message for a notable data artifact. Uses similar
* HTML markup as is used for this purpose by the hash lookup ingest module.
*
* @param artifact The data artifact
* @param corrAttrValue The notable attribute (correlation attribute value).
* @param otherCases The cases other than the current case in which the
* artifact was marked as notable.
*
* @return The message.
*/
@NbBundle.Messages({
"CentralRepoIngestModule_artifact_type_inbox_msg_header=Artifact Type",
"CentralRepoIngestModule_notable_attr_inbox_msg_header=Notable Attribute"
})
private static String makeNotableDataArtifactMessage(DataArtifact artifact, String corrAttrValue, Set<String> otherCases) {
StringBuilder message = new StringBuilder(1024);
message.append("<table border='0' cellpadding='4' width='280'>"); //NON-NLS
addTableRowMarkup(message, Bundle.CentralRepoIngestModule_artifact_type_inbox_msg_header(), artifact.getDisplayName());
addTableRowMarkup(message, Bundle.CentralRepoIngestModule_notable_attr_inbox_msg_header(), corrAttrValue);
addTableRowMarkup(message, Bundle.CentralRepoIngestModule_prev_cases_inbox_msg_header(), otherCases.stream().collect(Collectors.joining(",")));
message.append("</table>"); //NON-NLS
return message.toString();
}
/**
* Adds a table row to a notable item message (HTML).
*
* @param message The string builder for the message.
* @param headerText The table row header text.
* @param cellText The table row cell text.
*/
private static void addTableRowMarkup(StringBuilder message, String headerText, String cellText) {
message.append("<tr>"); //NON-NLS
message.append("<th>").append(headerText).append("</th>"); //NON-NLS
message.append("<td>").append(cellText).append("</td>"); //NON-NLS
message.append("</tr>"); //NON-NLS
}
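// Illustrative note (not part of the diff): a call such as
//   addTableRowMarkup(message, "File Name", "contract.docx")
// appends the following markup to the message:
//   <tr><th>File Name</th><td>contract.docx</td></tr>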
/*
* Prevents instantiation of this utility class.
*/
private CentralRepoIngestModuleUtils() {
}
}

View File

@@ -26,7 +26,11 @@ import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings;
final class IngestSettings implements IngestModuleIngestJobSettings {
private static final long serialVersionUID = 1L;
static final boolean DEFAULT_FLAG_TAGGED_NOTABLE_ITEMS = false;
static final boolean DEFAULT_FLAG_PREVIOUS_DEVICES = false;
static final boolean DEFAULT_FLAG_UNIQUE_DEVICES = false;
static final boolean DEFAULT_CREATE_CR_PROPERTIES = true;
private final boolean flagTaggedNotableItems;
private final boolean flagPreviousDevices;
private final boolean createCorrelationProperties;
@@ -36,10 +40,10 @@ final class IngestSettings implements IngestModuleIngestJobSettings {
* Instantiate the ingest job settings with default values.
*/
IngestSettings() {
this.flagTaggedNotableItems = CentralRepoIngestModule.DEFAULT_FLAG_TAGGED_NOTABLE_ITEMS;
this.flagPreviousDevices = CentralRepoIngestModule.DEFAULT_FLAG_PREVIOUS_DEVICES;
this.createCorrelationProperties = CentralRepoIngestModule.DEFAULT_CREATE_CR_PROPERTIES;
this.flagUniqueArtifacts = CentralRepoIngestModule.DEFAULT_FLAG_UNIQUE_DEVICES;
this.flagTaggedNotableItems = DEFAULT_FLAG_TAGGED_NOTABLE_ITEMS;
this.flagPreviousDevices = DEFAULT_FLAG_PREVIOUS_DEVICES;
this.createCorrelationProperties = DEFAULT_CREATE_CR_PROPERTIES;
this.flagUniqueArtifacts = DEFAULT_FLAG_UNIQUE_DEVICES;
}
/**

View File

@@ -247,3 +247,11 @@ DataResultPanel.pagesLabel.text=Pages:
DataResultPanel.pageNumLabel.text=
DataResultPanel.pageNextButton.text=
DataResultPanel.pagePrevButton.text=
DataResultViewerThumbnail.pageLabel.text=Page:
DataResultViewerThumbnail.pagesLabel.text=Pages:
DataResultViewerThumbnail.pagePrevButton.text=
DataResultViewerThumbnail.pageNextButton.text=
DataResultViewerThumbnail.pageNumLabel.text=-
DataResultViewerThumbnail.goToPageLabel.text=Go to Page:
DataResultViewerThumbnail.goToPageField.text=

View File

@@ -72,9 +72,9 @@ DataContentViewerHex.totalPageLabel.text_1=100
DataContentViewerHex.pageLabel2.text=Page
# Product Information panel
LBL_Description=<div style=\"font-size: 12pt; font-family: Verdana, 'Verdana CE', Arial, 'Arial CE', 'Lucida Grande CE', lucida, 'Helvetica CE', sans-serif;\">\n <b>Product Version:</b> {0} ({9}) <br><b>Sleuth Kit Version:</b> {7} <br><b>Netbeans RCP Build:</b> {8} <br> <b>Java:</b> {1}; {2}<br> <b>System:</b> {3}; {4}; {5}<br><b>Userdir:</b> {6}</div>
LBL_Description=<div style="font-size: 12pt; font-family: Verdana, 'Verdana CE', Arial, 'Arial CE', 'Lucida Grande CE', lucida, 'Helvetica CE', sans-serif;">\n <b>Product Version:</b> {0} ({9}) <br><b>Sleuth Kit Version:</b> {7} <br><b>Netbeans RCP Build:</b> {8} <br> <b>Java:</b> {1}; {2}<br> <b>System:</b> {3}; {4}; {5}<br><b>Userdir:</b> {6}</div>
Format_OperatingSystem_Value={0} version {1} running on {2}
LBL_Copyright=<div style\="font-size: 12pt; font-family: Verdana, 'Verdana CE', Arial, 'Arial CE', 'Lucida Grande CE', lucida, 'Helvetica CE', sans-serif; ">Autopsy&trade; is a digital forensics platform based on The Sleuth Kit&trade; and other tools. <br><ul><li>General Information: <a style\="color: \#1E2A60;" href\="http://www.sleuthkit.org">http://www.sleuthkit.org</a>.</li><li>Training: <a style\="color: \#1E2A60;" href\="https://www.autopsy.com/support/training/">https://www.autopsy.com/support/training/</a></li><li>Support: <a style\="color: \#1E2A60;" href\="https://www.sleuthkit.org/support.php">https://www.sleuthkit.org/support.php</a></li></ul>Copyright &copy; 2003-2020. </div>
LBL_Copyright=<div style="font-size: 12pt; font-family: Verdana, 'Verdana CE', Arial, 'Arial CE', 'Lucida Grande CE', lucida, 'Helvetica CE', sans-serif; ">Autopsy&trade; is a digital forensics platform based on The Sleuth Kit&trade; and other tools. <br><ul><li>General Information: <a style="color: #1E2A60;" href="http://www.sleuthkit.org">http://www.sleuthkit.org</a>.</li><li>Training: <a style="color: #1E2A60;" href="https://www.autopsy.com/support/training/">https://www.autopsy.com/support/training/</a></li><li>Support: <a style="color: #1E2A60;" href="https://www.sleuthkit.org/support.php">https://www.sleuthkit.org/support.php</a></li></ul>Copyright &copy; 2003-2020. </div>
SortChooser.dialogTitle=Choose Sort Criteria
ThumbnailViewChildren.progress.cancelling=(Cancelling)
# {0} - file name
@@ -97,7 +97,7 @@ DataContentViewerHex.goToPageTextField.text=
DataContentViewerHex.goToPageLabel.text=Go to Page:
DataResultViewerThumbnail.imagesLabel.text=Images:
DataResultViewerThumbnail.imagesRangeLabel.text=-
DataResultViewerThumbnail.filePathLabel.text=\ \ \
DataResultViewerThumbnail.filePathLabel.text=\
AdvancedConfigurationDialog.cancelButton.text=Cancel
DataArtifactContentViewer.waitText=Retrieving and preparing data, please wait...
DataArtifactContentViewer.errorText=Error retrieving result
@@ -311,3 +311,11 @@ DataResultPanel.pagesLabel.text=Pages:
DataResultPanel.pageNumLabel.text=
DataResultPanel.pageNextButton.text=
DataResultPanel.pagePrevButton.text=
DataResultViewerThumbnail.pageLabel.text=Page:
DataResultViewerThumbnail.pagesLabel.text=Pages:
DataResultViewerThumbnail.pagePrevButton.text=
DataResultViewerThumbnail.pageNextButton.text=
DataResultViewerThumbnail.pageNumLabel.text=-
DataResultViewerThumbnail.goToPageLabel.text=Go to Page:
DataResultViewerThumbnail.goToPageField.text=

View File

@@ -62,11 +62,14 @@ import org.sleuthkit.autopsy.datamodel.BaseChildFactory.PageCountChangeEvent;
import org.sleuthkit.autopsy.datamodel.BaseChildFactory.PageSizeChangeEvent;
import org.sleuthkit.autopsy.datamodel.NodeSelectionInfo;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.mainui.datamodel.CommAccountsSearchParams;
import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultDAO.AnalysisResultFetcher;
import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultDAO.AnalysisResultSetFetcher;
import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultDAO.KeywordHitResultFetcher;
import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSearchParam;
import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSetSearchParam;
import org.sleuthkit.autopsy.mainui.datamodel.CommAccountsDAO.CommAccountFetcher;
import org.sleuthkit.autopsy.mainui.datamodel.events.DAOAggregateEvent;
import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactDAO.DataArtifactFetcher;
import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactSearchParam;
import org.sleuthkit.autopsy.mainui.datamodel.FileSystemContentSearchParam;
@@ -76,8 +79,8 @@ import org.sleuthkit.autopsy.mainui.datamodel.FileSystemHostSearchParam;
import org.sleuthkit.autopsy.mainui.datamodel.FileTypeExtensionsSearchParams;
import org.sleuthkit.autopsy.mainui.datamodel.FileTypeMimeSearchParams;
import org.sleuthkit.autopsy.mainui.datamodel.FileTypeSizeSearchParams;
import org.sleuthkit.autopsy.mainui.datamodel.HashHitSearchParam;
import org.sleuthkit.autopsy.mainui.datamodel.KeywordHitSearchParam;
import org.sleuthkit.autopsy.mainui.datamodel.MainDAO;
import org.sleuthkit.autopsy.mainui.datamodel.OsAccountsDAO.AccountFetcher;
import org.sleuthkit.autopsy.mainui.datamodel.OsAccountsSearchParams;
import org.sleuthkit.autopsy.mainui.nodes.SearchResultRootNode;
@@ -87,7 +90,6 @@ import org.sleuthkit.autopsy.mainui.datamodel.TagsSearchParams;
import org.sleuthkit.autopsy.mainui.datamodel.ViewsDAO.FileTypeExtFetcher;
import org.sleuthkit.autopsy.mainui.datamodel.ViewsDAO.FileTypeMimeFetcher;
import org.sleuthkit.autopsy.mainui.datamodel.ViewsDAO.FileTypeSizeFetcher;
import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher;
import org.sleuthkit.autopsy.mainui.nodes.SearchManager;
/**
@@ -140,21 +142,9 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C
private final PreferenceChangeListener pageSizeListener = (PreferenceChangeEvent evt) -> {
if (evt.getKey().equals(UserPreferences.RESULTS_TABLE_PAGE_SIZE)) {
int newPageSize = UserPreferences.getResultsTablePageSize();
nodeNameToPageCountListenerMap.values().forEach((ps) -> {
ps.postPageSizeChangeEvent();
});
try {
if (this.searchResultManager != null) {
DAOFetcher<?> previousFetcher = this.searchResultManager.getDaoFetcher();
this.searchResultManager = new SearchManager(previousFetcher, newPageSize);
displaySearchResults(this.searchResultManager.getResults(), false);
}
} catch (IllegalArgumentException | ExecutionException ex) {
logger.log(Level.WARNING, "There was an error while updating page size", ex);
}
}
};
@@ -162,10 +152,11 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C
private final PropertyChangeListener caseEventListener = evt -> {
String evtName = evt.getPropertyName();
if (Case.Events.DATA_SOURCE_ADDED.toString().equals(evtName)) {
refreshSearchResultChildren();
} else if (Case.Events.CURRENT_CASE.toString().equals(evtName) && evt.getNewValue() == null) {
nodeNameToPageCountListenerMap.clear();
if (Case.Events.CURRENT_CASE.toString().equals(evtName)) {
searchResultManager = null;
if (evt.getNewValue() == null) {
nodeNameToPageCountListenerMap.clear();
}
}
};
@@ -176,27 +167,19 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C
IngestManager.IngestModuleEvent.CONTENT_CHANGED,
IngestManager.IngestModuleEvent.DATA_ADDED);
private final PropertyChangeListener ingestModuleListener = evt -> {
if (this.searchResultManager != null && this.searchResultManager.isRefreshRequired(evt)) {
refreshSearchResultChildren();
private final MainDAO mainDAO = MainDAO.getInstance();
private final PropertyChangeListener DAOListener = evt -> {
SearchManager manager = this.searchResultManager;
if (manager != null && evt != null && evt.getNewValue() instanceof DAOAggregateEvent) {
DAOAggregateEvent daoAggrEvt = (DAOAggregateEvent) evt.getNewValue();
if (daoAggrEvt.getEvents().stream().anyMatch((daoEvt) -> manager.isRefreshRequired(daoEvt))) {
refreshSearchResultChildren();
}
}
};
private final PropertyChangeListener weakIngestModuleListener = WeakListeners.propertyChange(ingestModuleListener, null);
private static final Set<IngestManager.IngestJobEvent> INGEST_JOB_EVENTS = EnumSet.of(
IngestManager.IngestJobEvent.COMPLETED,
IngestManager.IngestJobEvent.CANCELLED);
private final PropertyChangeListener ingestJobListener = (PropertyChangeEvent evt) -> {
String eventType = evt.getPropertyName();
if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString())
|| eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) {
refreshSearchResultChildren();
}
};
private final PropertyChangeListener weakIngestJobListener = WeakListeners.propertyChange(ingestJobListener, null);
private final PropertyChangeListener weakDAOListener = WeakListeners.propertyChange(DAOListener, mainDAO);
/**
* Creates and opens a Swing JPanel with a JTabbedPane child component that
@@ -461,8 +444,8 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C
private void initListeners() {
UserPreferences.addChangeListener(this.pageSizeListener);
Case.addEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, this.weakCaseEventListener);
IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS, this.weakIngestModuleListener);
IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS, weakIngestJobListener);
this.mainDAO.getResultEventsManager().addPropertyChangeListener(this.weakDAOListener);
IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS, this.weakDAOListener);
}
/**
@@ -471,8 +454,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C
private void closeListeners() {
UserPreferences.removeChangeListener(this.pageSizeListener);
Case.removeEventTypeSubscriber(EnumSet.of(Case.Events.CURRENT_CASE), this.weakCaseEventListener);
IngestManager.getInstance().removeIngestModuleEventListener(INGEST_MODULE_EVENTS, this.weakIngestModuleListener);
IngestManager.getInstance().removeIngestJobEventListener(INGEST_JOB_EVENTS, weakIngestJobListener);
this.mainDAO.getResultEventsManager().removePropertyChangeListener(this.weakDAOListener);
}
/**
@@ -512,7 +494,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C
// if search result root node, it's fine; otherwise, wrap in result
// viewer filter node to make sure there are no grandchildren
this.currentRootNode = (rootNode instanceof SearchResultRootNode)
this.currentRootNode = (rootNode instanceof SearchResultRootNode)
? rootNode
: new ResultViewerFilterParentNode(rootNode);
@@ -1187,6 +1169,27 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C
}
}
/**
* Displays results of querying the DAO for accounts matching the given
* search parameters query.
*
* @param accountParams The search parameters query.
*/
void displayAccounts(CommAccountsSearchParams accountParams) {
try {
this.searchResultManager = new SearchManager(new CommAccountFetcher(accountParams), getPageSize());
SearchResultsDTO results = searchResultManager.getResults();
displaySearchResults(results, true);
} catch (ExecutionException ex) {
logger.log(Level.WARNING,
MessageFormat.format("There was an error displaying search results for [artifact type: {0}, data source id: {1}, account type: {2}]",
accountParams.getType(),
accountParams.getDataSourceId() == null ? "<null>" : accountParams.getDataSourceId(),
accountParams.getType()),
ex);
}
}
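// Illustrative sketch (not part of the diff): a caller would build the account
// search parameters and delegate to this method; the constructor arguments
// shown here are hypothetical:
//
//   CommAccountsSearchParams params =
//           new CommAccountsSearchParams(accountType, dataSourceId);
//   dataResultPanel.displayAccounts(params);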
void displayAnalysisResult(AnalysisResultSearchParam analysisResultParams) {
try {
this.searchResultManager = new SearchManager(new AnalysisResultFetcher(analysisResultParams), getPageSize());
@@ -1270,7 +1273,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C
*/
void displayFileSizes(FileTypeSizeSearchParams fileSizeKey) {
try {
this.searchResultManager = new SearchManager(new FileTypeSizeFetcher(fileSizeKey), getPageSize());
this.searchResultManager = new SearchManager(MainDAO.getInstance().getViewsDAO().new FileTypeSizeFetcher(fileSizeKey), getPageSize());
SearchResultsDTO results = searchResultManager.getResults();
displaySearchResults(results, true);
} catch (ExecutionException | IllegalArgumentException ex) {
@@ -1321,7 +1324,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C
ex);
}
}
/**
* Displays results of querying the DAO for the given search parameters
* query.
@@ -1361,7 +1364,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C
ex);
}
}
/**
* Displays results of querying the DAO for the given search parameters
* query.
@@ -1420,7 +1423,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C
*/
private void refreshSearchResultChildren() {
try {
refreshSearchResultChildren(this.searchResultManager.getRefreshedData());
refreshSearchResultChildren(this.searchResultManager.getResults());
} catch (ExecutionException | IllegalArgumentException ex) {
logger.log(Level.WARNING, "There was an error refreshing data: ", ex);
}
@@ -1513,6 +1516,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C
/**
* Main constructor.
*
* @param original The original node to wrap.
*/
ResultViewerFilterParentNode(Node original) {

View File

@@ -46,6 +46,7 @@ import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSearchParam;
import org.sleuthkit.autopsy.mainui.datamodel.FileSystemContentSearchParam;
import org.sleuthkit.autopsy.mainui.datamodel.FileSystemHostSearchParam;
import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSetSearchParam;
import org.sleuthkit.autopsy.mainui.datamodel.CommAccountsSearchParams;
import org.sleuthkit.autopsy.mainui.datamodel.FileTypeMimeSearchParams;
import org.sleuthkit.autopsy.mainui.datamodel.FileTypeSizeSearchParams;
import org.sleuthkit.autopsy.mainui.datamodel.KeywordHitSearchParam;
@@ -481,6 +482,16 @@ public final class DataResultTopComponent extends TopComponent implements DataRe
public void displayOsAccounts(OsAccountsSearchParams osAccountParams) {
dataResultPanel.displayOsAccount(osAccountParams);
}
/**
* Displays results of querying the DAO for accounts matching the given
* search parameters query.
*
* @param accountParams The search parameters query.
*/
public void displayAccounts(CommAccountsSearchParams accountParams) {
dataResultPanel.displayAccounts(accountParams);
}
@Override
public void setTitle(String title) {

View File

@@ -528,13 +528,15 @@ public class DataResultViewerTable extends AbstractDataResultViewer {
if (rootNode == null || propertiesMap.isEmpty()) {
return;
}
if (rootNode instanceof TableFilterNode) {
TableFilterNode tfn = (TableFilterNode) rootNode;
if (rootNode instanceof TableFilterNode || searchResults != null) {
TableFilterNode tfn = searchResults == null ? (TableFilterNode) rootNode : null;
final Preferences preferences = NbPreferences.forModule(DataResultViewerTable.class);
final ETableColumnModel columnModel = (ETableColumnModel) outline.getColumnModel();
for (Map.Entry<String, ETableColumn> entry : columnMap.entrySet()) {
String columnName = entry.getKey();
final String columnHiddenKey = ResultViewerPersistence.getColumnHiddenKey(tfn, columnName);
final String columnHiddenKey =
tfn != null ? ResultViewerPersistence.getColumnHiddenKey(tfn, columnName) :
ResultViewerPersistence.getColumnHiddenKey(searchResults, columnName);
final TableColumn column = entry.getValue();
boolean columnHidden = columnModel.isColumnHidden(column);
if (columnHidden) {
@@ -554,12 +556,14 @@ public class DataResultViewerTable extends AbstractDataResultViewer {
if (rootNode == null || propertiesMap.isEmpty()) {
return;
}
if (rootNode instanceof TableFilterNode) {
TableFilterNode tfn = (TableFilterNode) rootNode;
if (rootNode instanceof TableFilterNode || searchResults != null) {
TableFilterNode tfn = searchResults == null ? (TableFilterNode) rootNode : null;
final Preferences preferences = NbPreferences.forModule(DataResultViewerTable.class);
// Store the current order of the columns into settings
for (Map.Entry<Integer, Property<?>> entry : propertiesMap.entrySet()) {
preferences.putInt(ResultViewerPersistence.getColumnPositionKey(tfn, entry.getValue().getName()), entry.getKey());
preferences.putInt(tfn != null ?
ResultViewerPersistence.getColumnPositionKey(tfn, entry.getValue().getName()) :
ResultViewerPersistence.getColumnPositionKey(searchResults, entry.getValue().getName()), entry.getKey());
}
}
}
@@ -571,16 +575,20 @@ public class DataResultViewerTable extends AbstractDataResultViewer {
if (rootNode == null || propertiesMap.isEmpty()) {
return;
}
if (rootNode instanceof TableFilterNode) {
final TableFilterNode tfn = ((TableFilterNode) rootNode);
if (rootNode instanceof TableFilterNode || searchResults != null) {
final TableFilterNode tfn = searchResults == null ? ((TableFilterNode) rootNode) : null;
final Preferences preferences = NbPreferences.forModule(DataResultViewerTable.class);
ETableColumnModel columnModel = (ETableColumnModel) outline.getColumnModel();
for (Map.Entry<String, ETableColumn> entry : columnMap.entrySet()) {
ETableColumn etc = entry.getValue();
String columnName = entry.getKey();
//store sort rank and order
final String columnSortOrderKey = ResultViewerPersistence.getColumnSortOrderKey(tfn, columnName);
final String columnSortRankKey = ResultViewerPersistence.getColumnSortRankKey(tfn, columnName);
final String columnSortOrderKey =
searchResults == null ? ResultViewerPersistence.getColumnSortOrderKey(tfn, columnName) :
ResultViewerPersistence.getColumnSortOrderKey(searchResults, columnName);
final String columnSortRankKey =
searchResults == null ? ResultViewerPersistence.getColumnSortRankKey(tfn, columnName):
ResultViewerPersistence.getColumnSortRankKey(searchResults, columnName);
if (etc.isSorted() && (columnModel.isColumnHidden(etc) == false)) {
preferences.putBoolean(columnSortOrderKey, etc.isAscending());
preferences.putInt(columnSortRankKey, etc.getSortRank());
@@ -590,7 +598,7 @@ public class DataResultViewerTable extends AbstractDataResultViewer {
preferences.remove(columnSortRankKey);
}
}
}
}
}
/**
@@ -603,17 +611,23 @@ public class DataResultViewerTable extends AbstractDataResultViewer {
if (rootNode == null || propertiesMap.isEmpty()) {
return;
}
if (rootNode instanceof TableFilterNode) {
final TableFilterNode tfn = (TableFilterNode) rootNode;
if (rootNode instanceof TableFilterNode || searchResults != null) {
final TableFilterNode tfn = (searchResults == null ? (TableFilterNode) rootNode : null);
final Preferences preferences = NbPreferences.forModule(DataResultViewerTable.class);
//organize property sorting information, sorted by rank
TreeSet<ColumnSortInfo> sortInfos = new TreeSet<>(Comparator.comparing(ColumnSortInfo::getRank));
propertiesMap.entrySet().stream().forEach(entry -> {
final String propName = entry.getValue().getName();
//if the sort rank is undefined, it will be defaulted to 0 => unsorted.
Integer sortRank = preferences.getInt(ResultViewerPersistence.getColumnSortRankKey(tfn, propName), 0);
Integer sortRank = preferences.getInt(
tfn != null ?
ResultViewerPersistence.getColumnSortRankKey(tfn, propName) :
ResultViewerPersistence.getColumnSortRankKey(searchResults, propName), 0);
//default to true => ascending
Boolean sortOrder = preferences.getBoolean(ResultViewerPersistence.getColumnSortOrderKey(tfn, propName), true);
Boolean sortOrder = preferences.getBoolean(
tfn != null ?
ResultViewerPersistence.getColumnSortOrderKey(tfn, propName) :
ResultViewerPersistence.getColumnSortOrderKey(searchResults, propName), true);
sortInfos.add(new ColumnSortInfo(entry.getKey(), sortRank, sortOrder));
});
//apply sort information in rank order.
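// Illustrative note (not part of the diff): with persisted ranks of, say,
// {Name: 1, Size: 2, Path: 0}, the rank-ordered TreeSet applies the Name
// sort first, then Size; Path, with rank 0, is treated as unsorted.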
@@ -629,13 +643,16 @@ public class DataResultViewerTable extends AbstractDataResultViewer {
if (rootNode == null || propertiesMap.isEmpty()) {
return;
}
if (rootNode instanceof TableFilterNode) {
if (rootNode instanceof TableFilterNode || searchResults != null) {
final Preferences preferences = NbPreferences.forModule(DataResultViewerTable.class);
final TableFilterNode tfn = ((TableFilterNode) rootNode);
final TableFilterNode tfn = (searchResults == null ? ((TableFilterNode) rootNode) : null);
ETableColumnModel columnModel = (ETableColumnModel) outline.getColumnModel();
for (Map.Entry<Integer, Property<?>> entry : propertiesMap.entrySet()) {
final String propName = entry.getValue().getName();
boolean hidden = preferences.getBoolean(ResultViewerPersistence.getColumnHiddenKey(tfn, propName), false);
boolean hidden = preferences.getBoolean(
tfn != null ?
ResultViewerPersistence.getColumnHiddenKey(tfn, propName) :
ResultViewerPersistence.getColumnHiddenKey(searchResults, propName), false);
final TableColumn column = columnMap.get(propName);
columnModel.setColumnHidden(column, hidden);
}
@@ -653,16 +670,7 @@ public class DataResultViewerTable extends AbstractDataResultViewer {
private synchronized List<Node.Property<?>> loadColumnOrder() {
if (searchResults != null) {
return searchResults.getColumns().stream()
.map(columnKey -> {
return new NodeProperty<>(
columnKey.getFieldName(),
columnKey.getDisplayName(),
columnKey.getDescription(),
""
);
})
.collect(Collectors.toList());
return loadColumnOrderForSearchResults();
}
List<Property<?>> props = ResultViewerPersistence.getAllChildProperties(rootNode, 100);
@@ -705,6 +713,51 @@ public class DataResultViewerTable extends AbstractDataResultViewer {
return new ArrayList<>(propertiesMap.values());
}
private synchronized List<Node.Property<?>> loadColumnOrderForSearchResults() {
List<Node.Property<?>> props = searchResults.getColumns().stream()
.map(columnKey -> {
return new NodeProperty<>(
columnKey.getFieldName(),
columnKey.getDisplayName(),
columnKey.getDescription(),
""
);
})
.collect(Collectors.toList());
propertiesMap.clear();
/*
* We load column index values into the properties map. If a property's
* index is outside the range of the number of properties or the index
* has already appeared as the position of another property, we put that
* property at the end.
*/
int offset = props.size();
final Preferences preferences = NbPreferences.forModule(DataResultViewerTable.class);
for (Property<?> prop : props) {
Integer value = preferences.getInt(ResultViewerPersistence.getColumnPositionKey(searchResults, prop.getName()), -1);
if (value >= 0 && value < offset && !propertiesMap.containsKey(value)) {
propertiesMap.put(value, prop);
} else {
propertiesMap.put(offset, prop);
offset++;
}
}
/*
* NOTE: it is possible to have "discontinuities" in the keys (i.e.
* column numbers) of the map. This happens when some of the columns had
* a previous setting, and other columns did not. We need to make the
* keys 0-indexed and continuous.
*/
compactPropertiesMap();
return new ArrayList<>(propertiesMap.values());
}
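// Illustrative note (not part of the diff): if the persisted positions yield
// map keys {0, 3, 7}, compactPropertiesMap() renumbers them to {0, 1, 2}
// while preserving the relative column order.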
/**
* Makes properties map 0-indexed and re-arranges elements to make sure the

View File

@@ -27,6 +27,200 @@
<Property name="alignment" type="int" value="0"/>
</Layout>
<SubComponents>
<Container class="javax.swing.JPanel" name="pagesPanel">
<Layout class="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout"/>
<SubComponents>
<Container class="javax.swing.JPanel" name="pageNumberPane">
<Constraints>
<Constraint layoutClass="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout" value="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout$GridBagConstraintsDescription">
<GridBagConstraints gridX="-1" gridY="-1" gridWidth="1" gridHeight="1" fill="3" ipadX="0" ipadY="0" insetsTop="0" insetsLeft="0" insetsBottom="0" insetsRight="0" anchor="23" weightX="0.0" weightY="0.0"/>
</Constraint>
</Constraints>
<Layout class="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout"/>
<SubComponents>
<Component class="javax.swing.JLabel" name="pageLabel">
<Properties>
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/corecomponents/Bundle.properties" key="DataResultViewerThumbnail.pageLabel.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
</Properties>
<Constraints>
<Constraint layoutClass="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout" value="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout$GridBagConstraintsDescription">
<GridBagConstraints gridX="0" gridY="0" gridWidth="1" gridHeight="1" fill="3" ipadX="0" ipadY="0" insetsTop="0" insetsLeft="0" insetsBottom="0" insetsRight="9" anchor="23" weightX="0.0" weightY="1.0"/>
</Constraint>
</Constraints>
</Component>
<Component class="javax.swing.JLabel" name="pageNumLabel">
<Properties>
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/corecomponents/Bundle.properties" key="DataResultViewerThumbnail.pageNumLabel.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
</Properties>
<Constraints>
<Constraint layoutClass="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout" value="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout$GridBagConstraintsDescription">
<GridBagConstraints gridX="1" gridY="0" gridWidth="1" gridHeight="1" fill="1" ipadX="0" ipadY="0" insetsTop="0" insetsLeft="0" insetsBottom="0" insetsRight="15" anchor="23" weightX="1.0" weightY="0.0"/>
</Constraint>
</Constraints>
</Component>
</SubComponents>
</Container>
</SubComponents>
</Container>
<Container class="javax.swing.JPanel" name="pageButtonPanel">
<Layout class="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout"/>
<SubComponents>
<Component class="javax.swing.JLabel" name="pagesLabel">
<Properties>
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/corecomponents/Bundle.properties" key="DataResultViewerThumbnail.pagesLabel.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
</Properties>
<Constraints>
<Constraint layoutClass="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout" value="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout$GridBagConstraintsDescription">
<GridBagConstraints gridX="0" gridY="0" gridWidth="1" gridHeight="1" fill="3" ipadX="0" ipadY="0" insetsTop="0" insetsLeft="0" insetsBottom="0" insetsRight="9" anchor="17" weightX="0.0" weightY="1.0"/>
</Constraint>
</Constraints>
</Component>
<Component class="javax.swing.JButton" name="pagePrevButton">
<Properties>
<Property name="icon" type="javax.swing.Icon" editor="org.netbeans.modules.form.editors2.IconEditor">
<Image iconType="3" name="/org/sleuthkit/autopsy/corecomponents/btn_step_back.png"/>
</Property>
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/corecomponents/Bundle.properties" key="DataResultViewerThumbnail.pagePrevButton.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
<Property name="border" type="javax.swing.border.Border" editor="org.netbeans.modules.form.editors2.BorderEditor">
<Border info="null"/>
</Property>
<Property name="disabledIcon" type="javax.swing.Icon" editor="org.netbeans.modules.form.editors2.IconEditor">
<Image iconType="3" name="/org/sleuthkit/autopsy/corecomponents/btn_step_back_disabled.png"/>
</Property>
<Property name="focusable" type="boolean" value="false"/>
<Property name="horizontalTextPosition" type="int" value="0"/>
<Property name="margin" type="java.awt.Insets" editor="org.netbeans.beaninfo.editors.InsetsEditor">
<Insets value="[0, 0, 0, 0]"/>
</Property>
<Property name="rolloverIcon" type="javax.swing.Icon" editor="org.netbeans.modules.form.editors2.IconEditor">
<Image iconType="3" name="/org/sleuthkit/autopsy/corecomponents/btn_step_back_hover.png"/>
</Property>
<Property name="verticalTextPosition" type="int" value="3"/>
</Properties>
<Events>
<EventHandler event="actionPerformed" listener="java.awt.event.ActionListener" parameters="java.awt.event.ActionEvent" handler="pagePrevButtonActionPerformed"/>
</Events>
<Constraints>
<Constraint layoutClass="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout" value="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout$GridBagConstraintsDescription">
<GridBagConstraints gridX="1" gridY="0" gridWidth="1" gridHeight="2" fill="0" ipadX="0" ipadY="0" insetsTop="0" insetsLeft="0" insetsBottom="0" insetsRight="0" anchor="23" weightX="0.0" weightY="0.0"/>
</Constraint>
</Constraints>
</Component>
<Component class="javax.swing.JButton" name="pageNextButton">
<Properties>
<Property name="icon" type="javax.swing.Icon" editor="org.netbeans.modules.form.editors2.IconEditor">
<Image iconType="3" name="/org/sleuthkit/autopsy/corecomponents/btn_step_forward.png"/>
</Property>
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/corecomponents/Bundle.properties" key="DataResultViewerThumbnail.pageNextButton.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
<Property name="border" type="javax.swing.border.Border" editor="org.netbeans.modules.form.editors2.BorderEditor">
<Border info="null"/>
</Property>
<Property name="disabledIcon" type="javax.swing.Icon" editor="org.netbeans.modules.form.editors2.IconEditor">
<Image iconType="3" name="/org/sleuthkit/autopsy/corecomponents/btn_step_forward_disabled.png"/>
</Property>
<Property name="focusable" type="boolean" value="false"/>
<Property name="horizontalTextPosition" type="int" value="0"/>
<Property name="margin" type="java.awt.Insets" editor="org.netbeans.beaninfo.editors.InsetsEditor">
<Insets value="[0, 0, 0, 0]"/>
</Property>
<Property name="maximumSize" type="java.awt.Dimension" editor="org.netbeans.beaninfo.editors.DimensionEditor">
<Dimension value="[27, 23]"/>
</Property>
<Property name="minimumSize" type="java.awt.Dimension" editor="org.netbeans.beaninfo.editors.DimensionEditor">
<Dimension value="[27, 23]"/>
</Property>
<Property name="rolloverIcon" type="javax.swing.Icon" editor="org.netbeans.modules.form.editors2.IconEditor">
<Image iconType="3" name="/org/sleuthkit/autopsy/corecomponents/btn_step_forward_hover.png"/>
</Property>
<Property name="verticalTextPosition" type="int" value="3"/>
</Properties>
<Events>
<EventHandler event="actionPerformed" listener="java.awt.event.ActionListener" parameters="java.awt.event.ActionEvent" handler="pageNextButtonActionPerformed"/>
</Events>
<Constraints>
<Constraint layoutClass="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout" value="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout$GridBagConstraintsDescription">
<GridBagConstraints gridX="2" gridY="0" gridWidth="1" gridHeight="2" fill="0" ipadX="0" ipadY="0" insetsTop="0" insetsLeft="0" insetsBottom="0" insetsRight="15" anchor="23" weightX="0.0" weightY="0.0"/>
</Constraint>
</Constraints>
</Component>
</SubComponents>
</Container>
<Container class="javax.swing.JPanel" name="pageGotoPane">
<Layout class="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout"/>
<SubComponents>
<Component class="javax.swing.JLabel" name="goToPageLabel">
<Properties>
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/corecomponents/Bundle.properties" key="DataResultViewerThumbnail.goToPageLabel.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
</Properties>
<Constraints>
<Constraint layoutClass="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout" value="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout$GridBagConstraintsDescription">
<GridBagConstraints gridX="0" gridY="0" gridWidth="1" gridHeight="1" fill="3" ipadX="0" ipadY="0" insetsTop="0" insetsLeft="0" insetsBottom="0" insetsRight="9" anchor="17" weightX="0.0" weightY="1.0"/>
</Constraint>
</Constraints>
</Component>
<Component class="javax.swing.JTextField" name="goToPageField">
<Properties>
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/corecomponents/Bundle.properties" key="DataResultViewerThumbnail.goToPageField.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
</Properties>
<Events>
<EventHandler event="actionPerformed" listener="java.awt.event.ActionListener" parameters="java.awt.event.ActionEvent" handler="goToPageFieldActionPerformed"/>
</Events>
<Constraints>
<Constraint layoutClass="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout" value="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout$GridBagConstraintsDescription">
<GridBagConstraints gridX="1" gridY="0" gridWidth="1" gridHeight="2" fill="0" ipadX="75" ipadY="0" insetsTop="0" insetsLeft="0" insetsBottom="0" insetsRight="15" anchor="23" weightX="1.0" weightY="0.0"/>
</Constraint>
</Constraints>
</Component>
</SubComponents>
</Container>
<Container class="javax.swing.JPanel" name="imagePane">
<Layout class="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout"/>
<SubComponents>
<Component class="javax.swing.JLabel" name="imagesLabel">
<Properties>
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/corecomponents/Bundle.properties" key="DataResultViewerThumbnail.imagesLabel.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
</Properties>
<Constraints>
<Constraint layoutClass="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout" value="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout$GridBagConstraintsDescription">
<GridBagConstraints gridX="-1" gridY="-1" gridWidth="1" gridHeight="1" fill="0" ipadX="0" ipadY="0" insetsTop="0" insetsLeft="0" insetsBottom="0" insetsRight="9" anchor="10" weightX="0.0" weightY="0.0"/>
</Constraint>
</Constraints>
</Component>
<Component class="javax.swing.JLabel" name="imagesRangeLabel">
<Properties>
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/corecomponents/Bundle.properties" key="DataResultViewerThumbnail.imagesRangeLabel.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
</Properties>
<Constraints>
<Constraint layoutClass="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout" value="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout$GridBagConstraintsDescription">
<GridBagConstraints gridX="-1" gridY="-1" gridWidth="1" gridHeight="1" fill="0" ipadX="0" ipadY="0" insetsTop="0" insetsLeft="0" insetsBottom="0" insetsRight="15" anchor="10" weightX="0.0" weightY="0.0"/>
</Constraint>
</Constraints>
</Component>
</SubComponents>
</Container>
<Component class="javax.swing.JComboBox" name="thumbnailSizeComboBox">
<Events>
<EventHandler event="actionPerformed" listener="java.awt.event.ActionListener" parameters="java.awt.event.ActionEvent" handler="thumbnailSizeComboBoxActionPerformed"/>

View File

@@ -27,9 +27,9 @@ import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.logging.Level;
import java.util.prefs.PreferenceChangeListener;
import java.util.prefs.Preferences;
import java.util.stream.Collectors;
import javax.swing.JOptionPane;
import javax.swing.ListSelectionModel;
import javax.swing.SortOrder;
import javax.swing.SwingUtilities;
@@ -42,10 +42,13 @@ import org.openide.explorer.ExplorerManager;
import org.openide.nodes.AbstractNode;
import org.openide.nodes.Children;
import org.openide.nodes.Node;
import org.openide.nodes.NodeEvent;
import org.openide.nodes.NodeListener;
import org.openide.nodes.NodeMemberEvent;
import org.openide.nodes.NodeReorderEvent;
import org.openide.util.NbBundle;
import org.openide.util.NbPreferences;
import org.openide.util.lookup.ServiceProvider;
import org.sleuthkit.autopsy.core.UserPreferences;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataResultViewer;
import static org.sleuthkit.autopsy.corecomponents.Bundle.*;
import org.sleuthkit.autopsy.corecomponents.ResultViewerPersistence.SortCriterion;
@@ -73,9 +76,13 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer {
private static final long serialVersionUID = 1L;
private static final Logger logger = Logger.getLogger(DataResultViewerThumbnail.class.getName());
private final PageUpdater pageUpdater = new PageUpdater();
private TableFilterNode rootNode;
private ThumbnailViewChildren rootNodeChildren;
private NodeSelectionListener selectionListener;
private int currentPage;
private int totalPages;
private int currentPageImages;
private int thumbSize = ImageUtils.ICON_SIZE_MEDIUM;
/**
@@ -112,7 +119,10 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer {
Bundle.DataResultViewerThumbnail_thumbnailSizeComboBox_medium(),
Bundle.DataResultViewerThumbnail_thumbnailSizeComboBox_large()}));
thumbnailSizeComboBox.setSelectedIndex(1);
currentPage = -1;
totalPages = 0;
currentPageImages = 0;
// The GUI builder is using FlowLayout, so this change should have no
// impact on the initially designed layout. It will just affect
// how the components are laid out as the size of the window changes.
@@ -130,6 +140,20 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer {
java.awt.GridBagConstraints gridBagConstraints;
buttonBarPanel = new javax.swing.JPanel();
pagesPanel = new javax.swing.JPanel();
pageNumberPane = new javax.swing.JPanel();
pageLabel = new javax.swing.JLabel();
pageNumLabel = new javax.swing.JLabel();
pageButtonPanel = new javax.swing.JPanel();
pagesLabel = new javax.swing.JLabel();
pagePrevButton = new javax.swing.JButton();
pageNextButton = new javax.swing.JButton();
pageGotoPane = new javax.swing.JPanel();
goToPageLabel = new javax.swing.JLabel();
goToPageField = new javax.swing.JTextField();
imagePane = new javax.swing.JPanel();
imagesLabel = new javax.swing.JLabel();
imagesRangeLabel = new javax.swing.JLabel();
thumbnailSizeComboBox = new javax.swing.JComboBox<>();
sortPane = new javax.swing.JPanel();
sortLabel = new javax.swing.JLabel();
@@ -141,6 +165,140 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer {
buttonBarPanel.setLayout(new java.awt.FlowLayout(java.awt.FlowLayout.LEFT));
pagesPanel.setLayout(new java.awt.GridBagLayout());
pageNumberPane.setLayout(new java.awt.GridBagLayout());
pageLabel.setText(org.openide.util.NbBundle.getMessage(DataResultViewerThumbnail.class, "DataResultViewerThumbnail.pageLabel.text")); // NOI18N
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 0;
gridBagConstraints.fill = java.awt.GridBagConstraints.VERTICAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.FIRST_LINE_START;
gridBagConstraints.weighty = 1.0;
gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 9);
pageNumberPane.add(pageLabel, gridBagConstraints);
pageNumLabel.setText(org.openide.util.NbBundle.getMessage(DataResultViewerThumbnail.class, "DataResultViewerThumbnail.pageNumLabel.text")); // NOI18N
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 0;
gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
gridBagConstraints.anchor = java.awt.GridBagConstraints.FIRST_LINE_START;
gridBagConstraints.weightx = 1.0;
gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 15);
pageNumberPane.add(pageNumLabel, gridBagConstraints);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.fill = java.awt.GridBagConstraints.VERTICAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.FIRST_LINE_START;
pagesPanel.add(pageNumberPane, gridBagConstraints);
buttonBarPanel.add(pagesPanel);
pageButtonPanel.setLayout(new java.awt.GridBagLayout());
pagesLabel.setText(org.openide.util.NbBundle.getMessage(DataResultViewerThumbnail.class, "DataResultViewerThumbnail.pagesLabel.text")); // NOI18N
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 0;
gridBagConstraints.fill = java.awt.GridBagConstraints.VERTICAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
gridBagConstraints.weighty = 1.0;
gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 9);
pageButtonPanel.add(pagesLabel, gridBagConstraints);
pagePrevButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/corecomponents/btn_step_back.png"))); // NOI18N
pagePrevButton.setText(org.openide.util.NbBundle.getMessage(DataResultViewerThumbnail.class, "DataResultViewerThumbnail.pagePrevButton.text")); // NOI18N
pagePrevButton.setBorder(null);
pagePrevButton.setDisabledIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/corecomponents/btn_step_back_disabled.png"))); // NOI18N
pagePrevButton.setFocusable(false);
pagePrevButton.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER);
pagePrevButton.setMargin(new java.awt.Insets(0, 0, 0, 0));
pagePrevButton.setRolloverIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/corecomponents/btn_step_back_hover.png"))); // NOI18N
pagePrevButton.setVerticalTextPosition(javax.swing.SwingConstants.BOTTOM);
pagePrevButton.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
pagePrevButtonActionPerformed(evt);
}
});
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 0;
gridBagConstraints.gridheight = 2;
gridBagConstraints.anchor = java.awt.GridBagConstraints.FIRST_LINE_START;
pageButtonPanel.add(pagePrevButton, gridBagConstraints);
pageNextButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/corecomponents/btn_step_forward.png"))); // NOI18N
pageNextButton.setText(org.openide.util.NbBundle.getMessage(DataResultViewerThumbnail.class, "DataResultViewerThumbnail.pageNextButton.text")); // NOI18N
pageNextButton.setBorder(null);
pageNextButton.setDisabledIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/corecomponents/btn_step_forward_disabled.png"))); // NOI18N
pageNextButton.setFocusable(false);
pageNextButton.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER);
pageNextButton.setMargin(new java.awt.Insets(0, 0, 0, 0));
pageNextButton.setMaximumSize(new java.awt.Dimension(27, 23));
pageNextButton.setMinimumSize(new java.awt.Dimension(27, 23));
pageNextButton.setRolloverIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/corecomponents/btn_step_forward_hover.png"))); // NOI18N
pageNextButton.setVerticalTextPosition(javax.swing.SwingConstants.BOTTOM);
pageNextButton.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
pageNextButtonActionPerformed(evt);
}
});
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 2;
gridBagConstraints.gridy = 0;
gridBagConstraints.gridheight = 2;
gridBagConstraints.anchor = java.awt.GridBagConstraints.FIRST_LINE_START;
gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 15);
pageButtonPanel.add(pageNextButton, gridBagConstraints);
buttonBarPanel.add(pageButtonPanel);
pageGotoPane.setLayout(new java.awt.GridBagLayout());
goToPageLabel.setText(org.openide.util.NbBundle.getMessage(DataResultViewerThumbnail.class, "DataResultViewerThumbnail.goToPageLabel.text")); // NOI18N
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 0;
gridBagConstraints.fill = java.awt.GridBagConstraints.VERTICAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
gridBagConstraints.weighty = 1.0;
gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 9);
pageGotoPane.add(goToPageLabel, gridBagConstraints);
goToPageField.setText(org.openide.util.NbBundle.getMessage(DataResultViewerThumbnail.class, "DataResultViewerThumbnail.goToPageField.text")); // NOI18N
goToPageField.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
goToPageFieldActionPerformed(evt);
}
});
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 0;
gridBagConstraints.gridheight = 2;
gridBagConstraints.ipadx = 75;
gridBagConstraints.anchor = java.awt.GridBagConstraints.FIRST_LINE_START;
gridBagConstraints.weightx = 1.0;
gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 15);
pageGotoPane.add(goToPageField, gridBagConstraints);
buttonBarPanel.add(pageGotoPane);
imagePane.setLayout(new java.awt.GridBagLayout());
imagesLabel.setText(org.openide.util.NbBundle.getMessage(DataResultViewerThumbnail.class, "DataResultViewerThumbnail.imagesLabel.text")); // NOI18N
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 9);
imagePane.add(imagesLabel, gridBagConstraints);
imagesRangeLabel.setText(org.openide.util.NbBundle.getMessage(DataResultViewerThumbnail.class, "DataResultViewerThumbnail.imagesRangeLabel.text")); // NOI18N
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 15);
imagePane.add(imagesRangeLabel, gridBagConstraints);
buttonBarPanel.add(imagePane);
thumbnailSizeComboBox.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
thumbnailSizeComboBoxActionPerformed(evt);
@@ -181,6 +339,18 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer {
add(iconView, java.awt.BorderLayout.CENTER);
}// </editor-fold>//GEN-END:initComponents
private void pagePrevButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_pagePrevButtonActionPerformed
previousPage();
}//GEN-LAST:event_pagePrevButtonActionPerformed
private void pageNextButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_pageNextButtonActionPerformed
nextPage();
}//GEN-LAST:event_pageNextButtonActionPerformed
private void goToPageFieldActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_goToPageFieldActionPerformed
goToPage(goToPageField.getText());
}//GEN-LAST:event_goToPageFieldActionPerformed
private void thumbnailSizeComboBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_thumbnailSizeComboBoxActionPerformed
int newIconSize;
switch (thumbnailSizeComboBox.getSelectedIndex()) {
@@ -199,14 +369,14 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer {
if (thumbSize != newIconSize) {
thumbSize = newIconSize;
Node root = this.getExplorerManager().getRootContext();
this.rootNodeChildren.setThumbsSize(thumbSize);
((ThumbnailViewChildren) root.getChildren()).setThumbsSize(thumbSize);
// Temporarily set the explored context to the root, instead of a child node.
// This is a workaround hack to convince org.openide.explorer.ExplorerManager to
// update even though the new and old Node values are identical. This in turn
// will cause the entire view to update completely. After this we
// immediately set the node back to the current child by calling switchPage().
this.getExplorerManager().setExploredContext(this.rootNode);
this.getExplorerManager().setExploredContext(root);
switchPage();
}
}//GEN-LAST:event_thumbnailSizeComboBoxActionPerformed
@@ -253,7 +423,21 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer {
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JPanel buttonBarPanel;
private javax.swing.JLabel filePathLabel;
private javax.swing.JTextField goToPageField;
private javax.swing.JLabel goToPageLabel;
private org.openide.explorer.view.IconView iconView;
private javax.swing.JPanel imagePane;
private javax.swing.JLabel imagesLabel;
private javax.swing.JLabel imagesRangeLabel;
private javax.swing.JPanel pageButtonPanel;
private javax.swing.JPanel pageGotoPane;
private javax.swing.JLabel pageLabel;
private javax.swing.JButton pageNextButton;
private javax.swing.JLabel pageNumLabel;
private javax.swing.JPanel pageNumberPane;
private javax.swing.JButton pagePrevButton;
private javax.swing.JLabel pagesLabel;
private javax.swing.JPanel pagesPanel;
private javax.swing.JButton sortButton;
private javax.swing.JLabel sortLabel;
private javax.swing.JPanel sortPane;
@ -265,7 +449,7 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer {
return (selectedNode != null);
}
@Override
public void setNode(Node givenNode) {
setNode(givenNode, null);
}
@ -273,7 +457,7 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer {
@Override
public void setNode(Node givenNode, SearchResultsDTO searchResults) {
// GVDTODO givenNode cannot be assumed to be a table filter node and the search results need to be captured.
setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
if (selectionListener == null) {
this.getExplorerManager().addPropertyChangeListener(new NodeSelectionListener());
@ -289,19 +473,23 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer {
// case where the DataResultViewerThumbnail stands alone from the
// DataResultViewer. See DataResultViewer setNode for more information.
if (givenNode != null && givenNode.getChildren().getNodesCount() > 0) {
// GVDTODO this should be handled more elegantly
rootNode = (givenNode instanceof TableFilterNode)
        ? (TableFilterNode) givenNode
        : new TableFilterNode(givenNode, true);
/*
* Wrap the given node in a ThumbnailViewChildren that will
* produce ThumbnailPageNodes with ThumbnailViewNode children
* from the child nodes of the given node.
*/
rootNodeChildren = new ThumbnailViewChildren(rootNode, thumbSize);
final Node root = new AbstractNode(Children.create(rootNodeChildren, true));
rootNodeChildren = new ThumbnailViewChildren(givenNode, thumbSize);
final Node root = new AbstractNode(rootNodeChildren);
pageUpdater.setRoot(root);
root.addNodeListener(pageUpdater);
this.getExplorerManager().setRootContext(root);
} else {
rootNode = null;
@ -328,7 +516,9 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer {
@Override
public void resetComponent() {
super.resetComponent();
setNode(null);
this.totalPages = 0;
this.currentPage = -1;
currentPageImages = 0;
updateControls();
}
@ -339,15 +529,59 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer {
super.clearComponent();
}
private void switchPage() {
private void nextPage() {
if (currentPage < totalPages) {
currentPage++;
switchPage();
}
}
private void previousPage() {
if (currentPage > 1) {
currentPage--;
switchPage();
}
}
private void goToPage(String pageNumText) {
int newPage;
try {
newPage = Integer.parseInt(pageNumText);
} catch (NumberFormatException e) {
//ignore input
return;
}
if (newPage > totalPages || newPage < 1) {
JOptionPane.showMessageDialog(this,
NbBundle.getMessage(this.getClass(), "DataResultViewerThumbnail.goToPageTextField.msgDlg", totalPages),
NbBundle.getMessage(this.getClass(), "DataResultViewerThumbnail.goToPageTextField.err"),
JOptionPane.WARNING_MESSAGE);
return;
}
currentPage = newPage;
switchPage();
}
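// Illustrative behavior (values assumed): with totalPages == 5, entering "3"
// switches to page 3; non-numeric input such as "abc" is silently ignored,
// while out-of-range values such as "0" or "6" pop the warning dialog and
// leave the current page unchanged.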
private void switchPage() {
SwingUtilities.invokeLater(() -> {
setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
try {
pagePrevButton.setEnabled(false);
pageNextButton.setEnabled(false);
goToPageField.setEnabled(false);
ProgressHandle progress = ProgressHandle.createHandle(
NbBundle.getMessage(this.getClass(), "DataResultViewerThumbnail.genThumbs"));
progress.start();
progress.switchToIndeterminate();
DataResultViewerThumbnail.this.rootNodeChildren.update();
ExplorerManager explorerManager = DataResultViewerThumbnail.this.getExplorerManager();
Node root = explorerManager.getRootContext();
Node pageNode = root.getChildren().getNodeAt(currentPage - 1);
explorerManager.setExploredContext(pageNode);
currentPageImages = pageNode.getChildren().getNodesCount();
progress.finish();
} catch (Exception ex) {
NotifyDescriptor d
@ -368,12 +602,26 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer {
"# {0} - sort criteria", "DataResultViewerThumbnail.sortLabel.textTemplate=Sorted by: {0}",
"DataResultViewerThumbnail.sortLabel.text=Sorted by: ---"})
private void updateControls() {
if (rootNode != null && rootNode.getChildren().getNodesCount(true) > 0) {
if (totalPages == 0) {
pagePrevButton.setEnabled(false);
pageNextButton.setEnabled(false);
goToPageField.setEnabled(false);
pageNumLabel.setText("");
imagesRangeLabel.setText("");
thumbnailSizeComboBox.setEnabled(false);
sortButton.setEnabled(false);
sortLabel.setText(DataResultViewerThumbnail_sortLabel_text());
} else {
pageNumLabel.setText(NbBundle.getMessage(this.getClass(), "DataResultViewerThumbnail.pageNumbers.curOfTotal",
Integer.toString(currentPage), Integer.toString(totalPages)));
final int imagesFrom = (currentPage - 1) * ThumbnailViewChildren.IMAGES_PER_PAGE + 1;
final int imagesTo = currentPageImages + (currentPage - 1) * ThumbnailViewChildren.IMAGES_PER_PAGE;
imagesRangeLabel.setText(imagesFrom + "-" + imagesTo);
pageNextButton.setEnabled(!(currentPage == totalPages));
pagePrevButton.setEnabled(!(currentPage == 1));
goToPageField.setEnabled(totalPages > 1);
sortButton.setEnabled(true);
thumbnailSizeComboBox.setEnabled(true);
if (rootNode != null) {
@ -388,6 +636,88 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer {
}
}
/**
* Listens for root node changes and updates the paging controls
*/
private class PageUpdater implements NodeListener {
private Node root;
void setRoot(Node root) {
this.root = root;
}
@Override
public void propertyChange(PropertyChangeEvent evt) {
}
@Override
public void childrenAdded(NodeMemberEvent nme) {
totalPages = root.getChildren().getNodesCount();
if (totalPages == 0) {
currentPage = -1;
updateControls();
return;
}
if (currentPage == -1 || currentPage > totalPages) {
currentPage = 1;
}
//force load the curPage node
final Node pageNode = root.getChildren().getNodeAt(currentPage - 1);
//em.setSelectedNodes(new Node[]{pageNode});
if (pageNode != null) {
pageNode.addNodeListener(new NodeListener() {
@Override
public void childrenAdded(NodeMemberEvent nme) {
currentPageImages = pageNode.getChildren().getNodesCount();
updateControls();
}
@Override
public void childrenRemoved(NodeMemberEvent nme) {
currentPageImages = 0;
updateControls();
}
@Override
public void childrenReordered(NodeReorderEvent nre) {
}
@Override
public void nodeDestroyed(NodeEvent ne) {
}
@Override
public void propertyChange(PropertyChangeEvent evt) {
}
});
DataResultViewerThumbnail.this.getExplorerManager().setExploredContext(pageNode);
}
updateControls();
}
@Override
public void childrenRemoved(NodeMemberEvent nme) {
totalPages = 0;
currentPage = -1;
updateControls();
}
@Override
public void childrenReordered(NodeReorderEvent nre) {
}
@Override
public void nodeDestroyed(NodeEvent ne) {
}
}
private class NodeSelectionListener implements PropertyChangeListener {
@Override
@ -417,5 +747,5 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer {
}
}
}
}
}
}

View File

@ -28,6 +28,7 @@ import javax.swing.SortOrder;
import org.openide.nodes.Children;
import org.openide.nodes.Node;
import org.openide.util.NbPreferences;
import org.sleuthkit.autopsy.mainui.datamodel.SearchResultsDTO;
final class ResultViewerPersistence {
@ -46,6 +47,10 @@ final class ResultViewerPersistence {
static String getColumnPositionKey(TableFilterNode node, String propName) {
return getColumnKeyBase(node, propName) + ".column";
}
static String getColumnPositionKey(SearchResultsDTO searchResult, String propName) {
return getColumnKeyBase(searchResult, propName) + ".column";
}
/**
* Gets a key for the given node and a property of its child nodes to store
@ -59,6 +64,10 @@ final class ResultViewerPersistence {
static String getColumnSortOrderKey(TableFilterNode node, String propName) {
return getColumnKeyBase(node, propName) + ".sortOrder";
}
static String getColumnSortOrderKey(SearchResultsDTO searchResult, String propName) {
return getColumnKeyBase(searchResult, propName) + ".sortOrder";
}
/**
* Gets a key for the given node and a property of its child nodes to store
@ -72,6 +81,10 @@ final class ResultViewerPersistence {
static String getColumnSortRankKey(TableFilterNode node, String propName) {
return getColumnKeyBase(node, propName) + ".sortRank";
}
static String getColumnSortRankKey(SearchResultsDTO searchResult, String propName) {
return getColumnKeyBase(searchResult, propName) + ".sortRank";
}
/**
* Gets a key for the given node and a property of its child nodes to store
@ -85,10 +98,18 @@ final class ResultViewerPersistence {
static String getColumnHiddenKey(TableFilterNode node, String propName) {
return getColumnKeyBase(node, propName) + ".hidden";
}
static String getColumnHiddenKey(SearchResultsDTO searchResult, String propName) {
return getColumnKeyBase(searchResult, propName) + ".hidden";
}
private static String getColumnKeyBase(TableFilterNode node, String propName) {
return stripNonAlphanumeric(node.getColumnOrderKey()) + "." + stripNonAlphanumeric(propName);
}
private static String getColumnKeyBase(SearchResultsDTO searchResult, String propName) {
return stripNonAlphanumeric(searchResult.getSignature()) + "." + stripNonAlphanumeric(propName);
}
private static String stripNonAlphanumeric(String str) {
return str.replaceAll("[^a-zA-Z0-9_]", "");
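// Illustrative example (values assumed): for a SearchResultsDTO whose
// getSignature() returns "File Types!" and a propName of "Modified Time",
// getColumnPositionKey() yields "FileTypes.ModifiedTime.column", since
// stripNonAlphanumeric() keeps only characters matching [a-zA-Z0-9_].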

View File

@ -18,18 +18,16 @@
*/
package org.sleuthkit.autopsy.corecomponents;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.awt.Image;
import java.awt.Toolkit;
import java.lang.ref.SoftReference;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
@ -38,16 +36,19 @@ import java.util.concurrent.Executors;
import java.util.concurrent.FutureTask;
import java.util.logging.Level;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;
import javax.swing.SwingUtilities;
import javax.swing.Timer;
import org.apache.commons.lang3.StringUtils;
import org.netbeans.api.progress.ProgressHandle;
import org.netbeans.api.progress.ProgressHandleFactory;
import org.openide.nodes.ChildFactory;
import org.openide.nodes.AbstractNode;
import org.openide.nodes.Children;
import org.openide.nodes.FilterNode;
import org.openide.nodes.Node;
import org.openide.util.NbBundle;
import org.openide.util.lookup.Lookups;
import org.sleuthkit.autopsy.corecomponents.ResultViewerPersistence.SortCriterion;
import static org.sleuthkit.autopsy.corecomponents.ResultViewerPersistence.loadSortCriteria;
import org.sleuthkit.autopsy.coreutils.ImageUtils;
@ -64,72 +65,61 @@ import org.sleuthkit.datamodel.Content;
* Filter-node like class, but adds additional hierarchy (pages) as parents of
* the filtered nodes.
*/
class ThumbnailViewChildren extends ChildFactory.Detachable<Node> {
class ThumbnailViewChildren extends Children.Keys<Integer> {
private static final Logger logger = Logger.getLogger(ThumbnailViewChildren.class.getName());
@NbBundle.Messages("ThumbnailViewChildren.progress.cancelling=(Cancelling)")
private static final String CANCELLING_POSTIX = Bundle.ThumbnailViewChildren_progress_cancelling();
static final int IMAGES_PER_PAGE = 200;
private final ExecutorService executor = Executors.newFixedThreadPool(3,
new ThreadFactoryBuilder().setNameFormat("Thumbnail-Loader-%d").build());
private final List<ThumbnailViewNode.ThumbnailLoadTask> tasks = new ArrayList<>();
private final Node parent;
private final List<List<Node>> pages = new ArrayList<>();
private int thumbSize;
private final Map<String, ThumbnailViewNode> nodeCache = new HashMap<>();
private final Object isSupportedLock = new Object();
/**
* The constructor
*
* @param parent    The node which is the parent of these children.
* @param thumbSize The height and/or width of the thumbnails in pixels.
*/
ThumbnailViewChildren(Node parent, int thumbSize) {
super(true); //support lazy loading
this.parent = parent;
this.thumbSize = thumbSize;
}
@Override
protected synchronized boolean createKeys(List<Node> toPopulate) {
List<Node> suppContent = Stream.of(parent.getChildren().getNodes())
.filter(n -> isSupported(n))
.sorted(getComparator())
.collect(Collectors.toList());
protected void addNotify() {
super.addNotify();
List<String> currNodeNames = suppContent.stream()
.map(nd -> nd.getName())
.collect(Collectors.toList());
/*
* TODO: When lazy loading of original nodes is fixed, we should be
* asking the datamodel for the children instead and not counting the
* children nodes (which might not be preloaded at this point).
*/
// get list of supported children sorted by persisted criteria
final List<Node> suppContent
= Stream.of(parent.getChildren().getNodes())
.filter(ThumbnailViewChildren::isSupported)
.sorted(getComparator())
.collect(Collectors.toList());
// find set of keys that are no longer present with current createKeys call.
Set<String> toRemove = new HashSet<>(nodeCache.keySet());
currNodeNames.forEach((k) -> toRemove.remove(k));
if (suppContent.isEmpty()) {
//if there are no images, there is nothing more to do
return;
}
// remove them from cache
toRemove.forEach((k) -> nodeCache.remove(k));
//divide the supported content into buckets
pages.addAll(Lists.partition(suppContent, IMAGES_PER_PAGE));
toPopulate.addAll(suppContent);
return true;
}
@Override
protected Node createNodeForKey(Node key) {
ThumbnailViewNode retNode = new ThumbnailViewNode(key, this.thumbSize);
nodeCache.put(key.getName(), retNode);
return retNode;
}
@Override
protected void removeNotify() {
super.removeNotify();
nodeCache.clear();
}
void update() {
this.refresh(false);
//the keys are just the indices into the pages list.
setKeys(IntStream.range(0, pages.size()).boxed().collect(Collectors.toList()));
}
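// Illustrative example (counts assumed): with 450 supported child nodes and
// IMAGES_PER_PAGE == 200, Lists.partition() yields three pages of sizes 200,
// 200, and 50, and setKeys() publishes the page indices 0, 1, and 2.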
/**
@ -214,15 +204,21 @@ class ThumbnailViewChildren extends ChildFactory.Detachable<Node> {
return null;
}
private boolean isSupported(Node node) {
@Override
protected void removeNotify() {
super.removeNotify();
pages.clear();
}
@Override
protected Node[] createNodes(Integer pageNum) {
return new Node[]{new ThumbnailPageNode(pageNum, pages.get(pageNum))};
}
private static boolean isSupported(Node node) {
if (node != null) {
Content content = null;
// this is to prevent dead-locking issue with simultaneous accesses.
synchronized (isSupportedLock) {
content = node.getLookup().lookup(AbstractFile.class);
}
Content content = node.getLookup().lookup(AbstractFile.class);
if (content != null) {
return ImageUtils.thumbnailSupported(content);
}
@ -232,9 +228,10 @@ class ThumbnailViewChildren extends ChildFactory.Detachable<Node> {
public void setThumbsSize(int thumbSize) {
this.thumbSize = thumbSize;
for (ThumbnailViewNode node : nodeCache.values()) {
node.setThumbSize(thumbSize);
for (Node page : getNodes()) {
for (Node node : page.getChildren().getNodes()) {
((ThumbnailViewNode) node).setThumbSize(thumbSize);
}
}
}
@ -252,7 +249,6 @@ class ThumbnailViewChildren extends ChildFactory.Detachable<Node> {
return task;
} else {
return null;
}
}
@ -277,7 +273,7 @@ class ThumbnailViewChildren extends ChildFactory.Detachable<Node> {
* The constructor
*
* @param wrappedNode The original node that this Node wraps.
* @param thumbSize   The height and/or width of the thumbnail in pixels.
*/
private ThumbnailViewNode(Node wrappedNode, int thumbSize) {
super(wrappedNode, FilterNode.Children.LEAF);
@ -384,4 +380,66 @@ class ThumbnailViewChildren extends ChildFactory.Detachable<Node> {
}
}
}
/**
* Node representing a page of thumbnails, a parent of image nodes, with a
* display name showing the range of its children
*/
private class ThumbnailPageNode extends AbstractNode {
private ThumbnailPageNode(Integer pageNum, List<Node> childNodes) {
super(new ThumbnailPageNodeChildren(childNodes), Lookups.singleton(pageNum));
setName(Integer.toString(pageNum + 1));
int from = 1 + (pageNum * IMAGES_PER_PAGE);
int to = from + ((ThumbnailPageNodeChildren) getChildren()).getChildCount() - 1;
setDisplayName(from + "-" + to);
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/Folder-icon.png"); //NON-NLS
}
}
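// Illustrative example (a full page assumed): pageNum == 1 with 200 child
// nodes yields setName("2") and the display name "201-400".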
/**
* Children.Keys implementation which uses nodes as keys, and wraps them in
* ThumbnailViewNodes as the child nodes.
*
*/
private class ThumbnailPageNodeChildren extends Children.Keys<Node> {
/*
* wrapped original nodes
*/
private List<Node> keyNodes = null;
ThumbnailPageNodeChildren(List<Node> keyNodes) {
super(true);
this.keyNodes = keyNodes;
}
@Override
protected void addNotify() {
super.addNotify();
setKeys(keyNodes);
}
@Override
protected void removeNotify() {
super.removeNotify();
setKeys(Collections.emptyList());
}
int getChildCount() {
return keyNodes.size();
}
@Override
protected Node[] createNodes(Node wrapped) {
if (wrapped != null) {
final ThumbnailViewNode thumb = new ThumbnailViewNode(wrapped, thumbSize);
return new Node[]{thumb};
} else {
return new Node[]{};
}
}
}
}

View File

@ -300,10 +300,10 @@ ImageNode.getActions.viewInNewWin.text=View in New Window
ImageNode.createSheet.name.name=Name
ImageNode.createSheet.name.displayName=Name
ImageNode.createSheet.name.desc=no description
Installer.exception.tskVerStringNull.msg=Sleuth Kit JNI test call returned without error, but version string was null!
Installer.exception.taskVerStringBang.msg=Sleuth Kit JNI test call returned without error, but version string was ""!
Installer.tskLibErr.msg=Problem with Sleuth Kit JNI. Test call failed!\n\nDetails: {0}
Installer.tskLibErr.err=Fatal Error!
Installer.exception.tskVerStringNull.msg=Sleuth Kit JNI test call returned without error, but version string was null\!
Installer.exception.taskVerStringBang.msg=Sleuth Kit JNI test call returned without error, but version string was ""\!
Installer.tskLibErr.msg=Problem with Sleuth Kit JNI. Test call failed\!\n\nDetails: {0}
Installer.tskLibErr.err=Fatal Error\!
InterestingHits.interestingItems.text=INTERESTING ITEMS
InterestingHits.displayName.text=Interesting Items
InterestingHits.createSheet.name.name=Name

View File

@ -34,6 +34,7 @@ import org.openide.util.lookup.Lookups;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.mainui.nodes.FileSystemFactory;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskDataException;
@ -69,7 +70,7 @@ public class DataSourceFilesNode extends DisplayableItemNode {
}
public DataSourceFilesNode(long dsObjId) {
super(Children.create(new DataSourcesNodeChildren(dsObjId), true), Lookups.singleton(NAME));
super(Children.create(new FileSystemFactory.DataSourceFactory(dsObjId), true), Lookups.singleton(NAME));
displayName = (dsObjId > 0) ? NbBundle.getMessage(DataSourceFilesNode.class, "DataSourcesNode.group_by_datasource.name") : NAME;
init();
}
@ -85,75 +86,6 @@ public class DataSourceFilesNode extends DisplayableItemNode {
return getClass().getName();
}
/*
* Custom Keys implementation that listens for new data sources being added.
*/
public static class DataSourcesNodeChildren extends AbstractContentChildren<Content> {
private static final Logger logger = Logger.getLogger(DataSourcesNodeChildren.class.getName());
private final long datasourceObjId;
List<Content> currentKeys;
public DataSourcesNodeChildren() {
this(0);
}
public DataSourcesNodeChildren(long dsObjId) {
super("ds_" + Long.toString(dsObjId));
this.currentKeys = new ArrayList<>();
this.datasourceObjId = dsObjId;
}
private final PropertyChangeListener pcl = new PropertyChangeListener() {
@Override
public void propertyChange(PropertyChangeEvent evt) {
String eventType = evt.getPropertyName();
if (eventType.equals(Case.Events.DATA_SOURCE_ADDED.toString())) {
refresh(true);
}
}
};
@Override
protected void onAdd() {
Case.addEventTypeSubscriber(EnumSet.of(Case.Events.DATA_SOURCE_ADDED), pcl);
}
@Override
protected void onRemove() {
Case.removeEventTypeSubscriber(EnumSet.of(Case.Events.DATA_SOURCE_ADDED), pcl);
currentKeys.clear();
}
@Override
protected List<Content> makeKeys() {
try {
if (datasourceObjId == 0) {
currentKeys = Case.getCurrentCaseThrows().getDataSources();
} else {
Content content = Case.getCurrentCaseThrows().getSleuthkitCase().getDataSource(datasourceObjId);
currentKeys = new ArrayList<>(Arrays.asList(content));
}
Collections.sort(currentKeys, new Comparator<Content>() {
@Override
public int compare(Content content1, Content content2) {
String content1Name = content1.getName().toLowerCase();
String content2Name = content2.getName().toLowerCase();
return content1Name.compareTo(content2Name);
}
});
} catch (TskCoreException | NoCurrentCaseException | TskDataException ex) {
logger.log(Level.SEVERE, "Error getting data sources: {0}", ex.getMessage()); // NON-NLS
}
return currentKeys;
}
}
@Override
public boolean isLeafTypeNode() {
return false;

View File

@ -48,7 +48,7 @@ class DataSourceGroupingNode extends DisplayableItemNode {
super(Optional.ofNullable(createDSGroupingNodeChildren(dataSource))
.orElse(new RootContentChildren(Arrays.asList(Collections.EMPTY_LIST))),
Lookups.singleton(dataSource));
if (dataSource instanceof Image) {
Image image = (Image) dataSource;

View File

@ -29,7 +29,6 @@ import java.util.logging.Level;
import java.util.stream.Collectors;
import javax.swing.Action;
import org.openide.nodes.ChildFactory;
import org.openide.nodes.Children;
import org.openide.nodes.Node;
import org.openide.nodes.Sheet;
@ -46,6 +45,7 @@ import org.sleuthkit.autopsy.datamodel.hosts.AssociatePersonsMenuAction;
import org.sleuthkit.autopsy.datamodel.hosts.MergeHostMenuAction;
import org.sleuthkit.autopsy.datamodel.hosts.RemoveParentPersonAction;
import org.sleuthkit.autopsy.mainui.datamodel.FileSystemHostSearchParam;
import org.sleuthkit.autopsy.mainui.nodes.FileSystemFactory;
import org.sleuthkit.autopsy.corecomponents.SelectionResponder;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.Host;
@ -221,7 +221,7 @@ public class HostNode extends DisplayableItemNode implements SelectionResponder{
* @param hosts The HostDataSources key.
*/
HostNode(HostDataSources hosts) {
this(Children.create(new HostGroupingChildren(HOST_DATA_SOURCES, hosts.getHost()), true), hosts.getHost());
this(Children.create(new FileSystemFactory(hosts.getHost()), true), hosts.getHost());
}
/**

View File

@ -140,8 +140,11 @@ public final class IconsUtil {
} else if (typeID == BlackboardArtifact.Type.TSK_KEYWORD_HIT.getTypeID()) {
imageFile = "keyword_hits.png";
} else if (typeID == BlackboardArtifact.Type.TSK_INTERESTING_ARTIFACT_HIT.getTypeID()
|| typeID == BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT.getTypeID()) {
|| typeID == BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT.getTypeID()
|| typeID == BlackboardArtifact.Type.TSK_INTERESTING_ITEM.getTypeID()) {
imageFile = "interesting_item.png";
} else if (typeID == BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID()) {
imageFile = "accounts.png";
} else {
imageFile = "artifact-icon.png"; //NON-NLS
}

View File

@ -43,6 +43,7 @@ import org.sleuthkit.autopsy.datamodel.LocalDirectoryNode;
import org.sleuthkit.autopsy.datamodel.SlackFileNode;
import org.sleuthkit.autopsy.datamodel.VirtualDirectoryNode;
import org.sleuthkit.autopsy.datamodel.VolumeNode;
import org.sleuthkit.autopsy.mainui.nodes.FileSystemFactory;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.Content;
@ -102,7 +103,10 @@ class DirectoryTreeFilterChildren extends FilterNode.Children {
return new Node[]{cloned};
} else if (origNode instanceof FileSize.FileSizeRootNode) {
Node cloned = ((FileSize.FileSizeRootNode) origNode).clone();
return new Node[]{cloned};
} else if (origNode instanceof FileSystemFactory.FileSystemTreeNode) {
Node cloned = ((FileSystemFactory.FileSystemTreeNode) origNode).clone();
return new Node[]{cloned};
} else if (origNode == null || !(origNode instanceof DisplayableItemNode)) {
return new Node[]{};
}

View File

@ -104,7 +104,7 @@ public final class ExtractUnallocAction extends AbstractAction {
public ExtractUnallocAction(String title, Image image, Volume volume) {
super(title);
this.volume = null;
this.volume = volume;
this.image = image;
chooserFactory = new JFileChooserFactory(CustomFileChooser.class);

View File

@ -33,10 +33,11 @@ public interface FileIngestModule extends IngestModule {
* IMPORTANT: In addition to returning ProcessResult.OK or
* ProcessResult.ERROR, modules should log all errors using methods provided
* by the org.sleuthkit.autopsy.coreutils.Logger class. Log messages should
* include the name and object ID of the data being processed. If an
* exception has been caught by the module, the exception should be sent to
* the Logger along with the log message so that a stack trace will appear
* in the application log.
* include the name and object ID of the data being processed and any other
* information that would be useful for debugging. If an exception has been
* caught by the module, the exception should be sent to the logger along
* with the log message so that a stack trace will appear in the application
* log.
*
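* A minimal sketch of the suggested logging (illustrative only; MyModule,
* the analyze() helper, and the message format are assumptions, not part of
* this interface):
*
*   try {
*       analyze(file);
*   } catch (Exception ex) {
*       Logger.getLogger(MyModule.class.getName()).log(Level.SEVERE,
*           String.format("Error analyzing %s (objId = %d)",
*               file.getName(), file.getId()), ex);
*       return ProcessResult.ERROR;
*   }
*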
* @param file The file to analyze.
*

View File

@ -170,10 +170,10 @@ public final class IngestJob {
* Starts data source level analysis for this job if it is running in
* streaming ingest mode.
*/
void processStreamingIngestDataSource() {
void addStreamedDataSource() {
if (ingestMode == Mode.STREAMING) {
if (ingestModuleExecutor != null) {
ingestModuleExecutor.startStreamingModeDataSourceAnalysis();
ingestModuleExecutor.addStreamedDataSource();
} else {
logger.log(Level.SEVERE, "Attempted to start data source analaysis with no ingest pipeline");
}

View File

@ -33,6 +33,7 @@ import java.util.regex.Pattern;
import java.util.stream.Stream;
import javax.annotation.concurrent.GuardedBy;
import javax.swing.JOptionPane;
import javax.swing.SwingUtilities;
import org.netbeans.api.progress.ProgressHandle;
import org.openide.util.Cancellable;
import org.openide.util.NbBundle;
@ -41,6 +42,7 @@ import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.core.RuntimeProperties;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.NetworkUtils;
import org.sleuthkit.autopsy.coreutils.ThreadConfined;
import org.sleuthkit.autopsy.ingest.IngestTasksScheduler.IngestJobTasksSnapshot;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
@ -190,21 +192,26 @@ final class IngestJobExecutor {
/*
* If running in the NetBeans thick client application version of Autopsy,
* NetBeans progress bars are used to display ingest job progress in the
* lower right hand corner of the main application window. A layer of
* abstraction to allow alternate representations of progress could be used
* here, as it is in other places in the application, to better decouple
* this object from the application's presentation layer.
* NetBeans progress handles (i.e., progress bars) are used to display
* ingest job progress in the lower right hand corner of the main
* application window.
*
* A layer of abstraction to allow alternate representations of progress
* could be used here, as it is in other places in the application (see
* implementations and usage of
* org.sleuthkit.autopsy.progress.ProgressIndicator interface), to better
* decouple this object from the application's presentation layer.
*/
private final boolean usingNetBeansGUI;
private final Object dataSourceIngestProgressLock = new Object();
@ThreadConfined(type = ThreadConfined.ThreadType.AWT)
private ProgressHandle dataSourceIngestProgressBar;
private final Object fileIngestProgressLock = new Object();
@ThreadConfined(type = ThreadConfined.ThreadType.AWT)
private final List<String> filesInProgress = new ArrayList<>();
private long estimatedFilesToProcess;
private long processedFiles;
private volatile long estimatedFilesToProcess;
private volatile long processedFiles;
@ThreadConfined(type = ThreadConfined.ThreadType.AWT)
private ProgressHandle fileIngestProgressBar;
private final Object artifactIngestProgressLock = new Object();
@ThreadConfined(type = ThreadConfined.ThreadType.AWT)
private ProgressHandle artifactIngestProgressBar;
/*
@ -534,7 +541,7 @@ final class IngestJobExecutor {
}
/**
* Determines which ingest job stage to start in and starts up the ingest
* module pipelines.
*
* @return A collection of ingest module startup errors, empty on success.
@ -664,41 +671,39 @@ final class IngestJobExecutor {
*/
private void startBatchModeAnalysis() {
synchronized (stageTransitionLock) {
logInfoMessage(String.format("Starting analysis in batch mode for %s (objID=%d, jobID=%d)", dataSource.getName(), dataSource.getId(), ingestJob.getId())); //NON-NLS
logInfoMessage("Starting ingest job in batch mode"); //NON-NLS
stage = IngestJobStage.FILE_AND_HIGH_PRIORITY_DATA_SRC_LEVEL_ANALYSIS;
if (hasFileIngestModules()) {
/*
* Do a count of the files the data source processor has added
* to the case database. This number will be used to estimate
* how many files remain to be analyzed as each file ingest task
* is completed.
* Do an estimate of the total number of files to be analyzed.
* This number will be used to estimate how many files remain
* to be analyzed as each file ingest task is completed. The
* numbers are estimates because file analysis can add carved
* files and/or derived files.
*/
long filesToProcess;
if (files.isEmpty()) {
filesToProcess = dataSource.accept(new GetFilesCountVisitor());
/*
* Do a count of the files the data source processor has
* added to the case database.
*/
estimatedFilesToProcess = dataSource.accept(new GetFilesCountVisitor());
} else {
filesToProcess = files.size();
}
synchronized (fileIngestProgressLock) {
estimatedFilesToProcess = filesToProcess;
/*
* Use the number of files in the specified subset of all of
* the files for the data source.
*/
estimatedFilesToProcess = files.size();
}
startFileIngestProgressBar();
}
if (usingNetBeansGUI) {
/*
* Start ingest progress bars in the lower right hand corner of
* the main application window.
*/
if (hasFileIngestModules()) {
startFileIngestProgressBar();
}
if (hasHighPriorityDataSourceIngestModules()) {
startDataSourceIngestProgressBar();
}
if (hasDataArtifactIngestModules()) {
startArtifactIngestProgressBar();
}
if (hasHighPriorityDataSourceIngestModules()) {
startDataSourceIngestProgressBar();
}
if (hasDataArtifactIngestModules()) {
startArtifactIngestProgressBar();
}
/*
@ -708,60 +713,75 @@ final class IngestJobExecutor {
currentDataSourceIngestPipeline = highPriorityDataSourceIngestPipeline;
/*
* Schedule ingest tasks and then immediately check for stage
* completion. This is necessary because it is possible that zero
* tasks will actually make it to task execution due to the file
* filter or other ingest job settings. In that case, there will
* never be a stage completion check in an ingest thread executing
* an ingest task, so such a job would run forever without a check
* here.
* Schedule ingest tasks. If only analyzing a subset of the files in
* the data source, the current assumption is that only file ingest
* tasks for those files need to be scheduled. Data artifact ingest
* tasks will be scheduled as data artifacts produced by the file
* analysis are posted to the blackboard.
*/
if (!files.isEmpty() && hasFileIngestModules()) {
taskScheduler.scheduleFileIngestTasks(this, files);
} else if (hasHighPriorityDataSourceIngestModules() || hasFileIngestModules() || hasDataArtifactIngestModules()) {
taskScheduler.scheduleIngestTasks(this);
}
/*
* Check for stage completion. This is necessary because it is
* possible that none of the tasks that were just scheduled will
* actually make it to task execution, due to the file filter or
* other ingest job settings. If that happens, there will never be
* another stage completion check for this job in an ingest thread
* executing an ingest task, so such a job would run forever without
* a check here.
*/
checkForStageCompleted();
}
}
/**
* Starts analysis for a streaming mode ingest job. For a streaming mode
* job, the data source processor streams files in as it adds them to the
* case database and file analysis can begin before data source level
* analysis.
* job, a data source processor streams files to this ingest job executor as
* it adds the files to the case database, and file level analysis can begin
* before data source level analysis.
*/
private void startStreamingModeAnalysis() {
synchronized (stageTransitionLock) {
logInfoMessage("Starting data source level analysis in streaming mode"); //NON-NLS
logInfoMessage("Starting ingest job in streaming mode"); //NON-NLS
stage = IngestJobStage.STREAMED_FILE_ANALYSIS_ONLY;
if (usingNetBeansGUI) {
if (hasFileIngestModules()) {
/*
* Start ingest progress bars in the lower right hand corner of
* the main application window.
* Start the file ingest progress bar, but do not schedule any
* file or data source ingest tasks. File ingest tasks will
* instead be scheduled as files are streamed in via
* addStreamedFiles(), and a data source ingest task will be
* scheduled later, via addStreamedDataSource().
*
* Note that because estimated files remaining to process still
* has its initial value of zero, the file ingest progress bar
* will start in the "indeterminate" state. A rough estimate of
* the files to be processed will be computed later, when all of
* the files have been added to the case database, as signaled
* by a call to addStreamedDataSource().
*/
if (hasFileIngestModules()) {
/*
* Note that because estimated files remaining to process
* still has its initial value of zero, the progress bar
* will start in the "indeterminate" state. An estimate of
* the files to process can be computed later, when all of
* the files have been added to the case database.
*/
startFileIngestProgressBar();
}
if (hasDataArtifactIngestModules()) {
startArtifactIngestProgressBar();
}
estimatedFilesToProcess = 0;
startFileIngestProgressBar();
}
if (hasDataArtifactIngestModules()) {
startArtifactIngestProgressBar();
/*
* Schedule artifact ingest tasks for any artifacts currently in
* the case database. This needs to be done before any files or
* the data source are streamed in to avoid analyzing the data
* artifacts added to the case database by those tasks twice.
* This constraint is implemented by restricting construction of
* a streaming mode IngestJob to
* IngestManager.openIngestStream(), which constructs and starts
* the job before returning the IngestStream. This means that
* the code in this method will run before addStreamedFiles() or
* addStreamedDataSource() can be called via the IngestStream.
*/
taskScheduler.scheduleDataArtifactIngestTasks(this);
}
@ -773,7 +793,7 @@ final class IngestJobExecutor {
* case database and streamed in, and the data source is now ready for
* analysis.
*/
void startStreamingModeDataSourceAnalysis() {
void addStreamedDataSource() {
synchronized (stageTransitionLock) {
logInfoMessage("Starting full first stage analysis in streaming mode"); //NON-NLS
stage = IngestJobExecutor.IngestJobStage.FILE_AND_HIGH_PRIORITY_DATA_SRC_LEVEL_ANALYSIS;
@ -781,43 +801,36 @@ final class IngestJobExecutor {
if (hasFileIngestModules()) {
/*
* Do a count of the files the data source processor has added
* to the case database. This number will be used to estimate
* how many files remain to be analyzed as each file ingest task
* is completed.
* For ingest job progress reporting purposes, do a count of the
* files the data source processor has added to the case
* database.
*/
long filesToProcess = dataSource.accept(new GetFilesCountVisitor());
synchronized (fileIngestProgressLock) {
estimatedFilesToProcess = filesToProcess;
if (usingNetBeansGUI && fileIngestProgressBar != null) {
fileIngestProgressBar.switchToDeterminate((int) estimatedFilesToProcess);
}
}
estimatedFilesToProcess = dataSource.accept(new GetFilesCountVisitor());
switchFileIngestProgressBarToDeterminate();
}
if (usingNetBeansGUI) {
currentDataSourceIngestPipeline = highPriorityDataSourceIngestPipeline;
if (hasHighPriorityDataSourceIngestModules()) {
/*
* Start a data source level ingest progress bar in the lower
* right hand corner of the main application window. The file
* and data artifact ingest progress bars were already started
* in startStreamingModeAnalysis().
*/
if (hasHighPriorityDataSourceIngestModules()) {
startDataSourceIngestProgressBar();
}
}
startDataSourceIngestProgressBar();
currentDataSourceIngestPipeline = highPriorityDataSourceIngestPipeline;
if (hasHighPriorityDataSourceIngestModules()) {
/*
* Schedule a task for the data source.
*/
IngestJobExecutor.taskScheduler.scheduleDataSourceIngestTask(this);
} else {
/*
* If no data source level ingest task is scheduled at this time
* and all of the file level and artifact ingest tasks scheduled
* during the initial file streaming stage have already
* executed, there will never be a stage completion check in an
* ingest thread executing an ingest task, so such a job would
* run forever without a check here.
* If no data source level ingest task is scheduled at this
* time, and all of the file level and artifact ingest tasks
* scheduled during the initial file streaming stage have
* already been executed, there will never be a stage completion
* check in an ingest thread executing an ingest task for this
* job, so such a job would run forever without a check here.
*/
checkForStageCompleted();
}
@ -830,13 +843,9 @@ final class IngestJobExecutor {
private void startLowPriorityDataSourceAnalysis() {
synchronized (stageTransitionLock) {
if (hasLowPriorityDataSourceIngestModules()) {
logInfoMessage(String.format("Starting low priority data source analysis for %s (objID=%d, jobID=%d)", dataSource.getName(), dataSource.getId(), ingestJob.getId())); //NON-NLS
logInfoMessage("Starting low priority data source analysis"); //NON-NLS
stage = IngestJobExecutor.IngestJobStage.LOW_PRIORITY_DATA_SRC_LEVEL_ANALYSIS;
if (usingNetBeansGUI) {
startDataSourceIngestProgressBar();
}
startDataSourceIngestProgressBar();
currentDataSourceIngestPipeline = lowPriorityDataSourceIngestPipeline;
taskScheduler.scheduleDataSourceIngestTask(this);
}
@ -844,40 +853,42 @@ final class IngestJobExecutor {
}
/**
* Starts a data artifacts analysis NetBeans progress bar in the lower right
* hand corner of the main application window. The progress bar provides the
* user with a task cancellation button. Pressing it cancels the ingest job.
* Analysis already completed at the time that cancellation occurs is NOT
* discarded.
* Starts a NetBeans progress bar for data artifacts analysis in the lower
* right hand corner of the main application window. The progress bar
* provides the user with a task cancellation button. Pressing it cancels
* the ingest job. Analysis already completed at the time that cancellation
* occurs is NOT discarded.
*/
private void startArtifactIngestProgressBar() {
if (usingNetBeansGUI) {
synchronized (artifactIngestProgressLock) {
SwingUtilities.invokeLater(() -> {
String displayName = NbBundle.getMessage(this.getClass(), "IngestJob.progress.dataArtifactIngest.displayName", this.dataSource.getName());
artifactIngestProgressBar = ProgressHandle.createHandle(displayName, new Cancellable() {
@Override
public boolean cancel() {
IngestJobExecutor.this.cancel(IngestJob.CancellationReason.USER_CANCELLED);
new Thread(() -> {
IngestJobExecutor.this.cancel(IngestJob.CancellationReason.USER_CANCELLED);
}).start();
return true;
}
});
artifactIngestProgressBar.start();
artifactIngestProgressBar.switchToIndeterminate();
}
});
}
}
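// Note on the pattern above (an inference): the Cancellable callback runs on
// the EDT, so cancel() is handed off to a worker thread to keep the UI
// responsive while pending ingest tasks are cancelled; the same hand-off
// appears in the other progress bar cancel callbacks below.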
/**
* Starts a data source level analysis NetBeans progress bar in the lower
* right hand corner of the main application window. The progress bar
* Starts a NetBeans progress bar for data source level analysis in the
* lower right hand corner of the main application window. The progress bar
* provides the user with a task cancellation button. Pressing it cancels
* either the currently running data source level ingest module or the
* either the currently running data source level ingest module, or the
* entire ingest job. Analysis already completed at the time that
* cancellation occurs is NOT discarded.
*/
private void startDataSourceIngestProgressBar() {
if (usingNetBeansGUI) {
synchronized (dataSourceIngestProgressLock) {
SwingUtilities.invokeLater(() -> {
String displayName = NbBundle.getMessage(this.getClass(), "IngestJob.progress.dataSourceIngest.initialDisplayName", dataSource.getName());
dataSourceIngestProgressBar = ProgressHandle.createHandle(displayName, new Cancellable() {
@Override
@ -894,21 +905,25 @@ final class IngestJobExecutor {
String dialogTitle = NbBundle.getMessage(IngestJobExecutor.this.getClass(), "IngestJob.cancellationDialog.title");
JOptionPane.showConfirmDialog(WindowManager.getDefault().getMainWindow(), panel, dialogTitle, JOptionPane.OK_OPTION, JOptionPane.PLAIN_MESSAGE);
if (panel.cancelAllDataSourceIngestModules()) {
IngestJobExecutor.this.cancel(IngestJob.CancellationReason.USER_CANCELLED);
new Thread(() -> {
IngestJobExecutor.this.cancel(IngestJob.CancellationReason.USER_CANCELLED);
}).start();
} else {
IngestJobExecutor.this.cancelCurrentDataSourceIngestModule();
new Thread(() -> {
IngestJobExecutor.this.cancelCurrentDataSourceIngestModule();
}).start();
}
return true;
}
});
dataSourceIngestProgressBar.start();
dataSourceIngestProgressBar.switchToIndeterminate();
}
});
}
}
/**
* Starts a file analysis NetBeans progress bar in the lower right hand
* Starts a NetBeans progress bar for file analysis in the lower right hand
* corner of the main application window. The progress bar provides the user
* with a task cancellation button. Pressing it cancels the ingest job.
* Analysis already completed at the time that cancellation occurs is NOT
@ -916,18 +931,63 @@ final class IngestJobExecutor {
*/
private void startFileIngestProgressBar() {
if (usingNetBeansGUI) {
synchronized (fileIngestProgressLock) {
SwingUtilities.invokeLater(() -> {
String displayName = NbBundle.getMessage(getClass(), "IngestJob.progress.fileIngest.displayName", dataSource.getName());
fileIngestProgressBar = ProgressHandle.createHandle(displayName, new Cancellable() {
@Override
public boolean cancel() {
IngestJobExecutor.this.cancel(IngestJob.CancellationReason.USER_CANCELLED);
new Thread(() -> {
IngestJobExecutor.this.cancel(IngestJob.CancellationReason.USER_CANCELLED);
}).start();
return true;
}
});
fileIngestProgressBar.start();
fileIngestProgressBar.switchToDeterminate((int) this.estimatedFilesToProcess);
}
fileIngestProgressBar.switchToDeterminate((int) estimatedFilesToProcess);
});
}
}
/**
* Finishes the first stage progress bars.
*/
private void finishFirstStageProgressBars() {
if (usingNetBeansGUI) {
SwingUtilities.invokeLater(() -> {
if (dataSourceIngestProgressBar != null) {
dataSourceIngestProgressBar.finish();
dataSourceIngestProgressBar = null;
}
if (fileIngestProgressBar != null) {
fileIngestProgressBar.finish();
fileIngestProgressBar = null;
}
});
}
}
/**
* Finishes all current progress bars.
*/
private void finishAllProgressBars() {
if (usingNetBeansGUI) {
SwingUtilities.invokeLater(() -> {
if (dataSourceIngestProgressBar != null) {
dataSourceIngestProgressBar.finish();
dataSourceIngestProgressBar = null;
}
if (fileIngestProgressBar != null) {
fileIngestProgressBar.finish();
fileIngestProgressBar = null;
}
if (artifactIngestProgressBar != null) {
artifactIngestProgressBar.finish();
artifactIngestProgressBar = null;
}
});
}
}
@ -968,21 +1028,7 @@ final class IngestJobExecutor {
shutDownIngestModulePipeline(pipeline);
}
if (usingNetBeansGUI) {
synchronized (dataSourceIngestProgressLock) {
if (dataSourceIngestProgressBar != null) {
dataSourceIngestProgressBar.finish();
dataSourceIngestProgressBar = null;
}
}
synchronized (fileIngestProgressLock) {
if (fileIngestProgressBar != null) {
fileIngestProgressBar.finish();
fileIngestProgressBar = null;
}
}
}
finishFirstStageProgressBars();
if (!jobCancelled && hasLowPriorityDataSourceIngestModules()) {
startLowPriorityDataSourceAnalysis();
@ -993,7 +1039,8 @@ final class IngestJobExecutor {
}
/**
* Shuts down the ingest module pipelines and progress bars.
* Shuts down the ingest module pipelines and ingest job progress
* indicators.
*/
private void shutDown() {
synchronized (stageTransitionLock) {
@ -1002,29 +1049,7 @@ final class IngestJobExecutor {
shutDownIngestModulePipeline(currentDataSourceIngestPipeline);
shutDownIngestModulePipeline(artifactIngestPipeline);
if (usingNetBeansGUI) {
synchronized (dataSourceIngestProgressLock) {
if (dataSourceIngestProgressBar != null) {
dataSourceIngestProgressBar.finish();
dataSourceIngestProgressBar = null;
}
}
synchronized (fileIngestProgressLock) {
if (fileIngestProgressBar != null) {
fileIngestProgressBar.finish();
fileIngestProgressBar = null;
}
}
synchronized (artifactIngestProgressLock) {
if (artifactIngestProgressBar != null) {
artifactIngestProgressBar.finish();
artifactIngestProgressBar = null;
}
}
}
finishAllProgressBars();
if (ingestJobInfo != null) {
if (jobCancelled) {
@ -1100,7 +1125,7 @@ final class IngestJobExecutor {
if (!pipeline.isEmpty()) {
/*
* Get the file from the task. If the file was "streamed,"
* the task may only have the file object ID and a trip to
* the task may only have the file object ID, and a trip to
* the case database will be required.
*/
AbstractFile file;
@ -1114,46 +1139,24 @@ final class IngestJobExecutor {
return;
}
synchronized (fileIngestProgressLock) {
++processedFiles;
if (usingNetBeansGUI) {
if (processedFiles <= estimatedFilesToProcess) {
fileIngestProgressBar.progress(file.getName(), (int) processedFiles);
} else {
fileIngestProgressBar.progress(file.getName(), (int) estimatedFilesToProcess);
}
filesInProgress.add(file.getName());
}
}
/**
* Run the file through the modules in the pipeline.
* Run the file through the modules in the file ingest
* pipeline.
*/
final String fileName = file.getName();
processedFiles++;
updateFileIngestProgressForFileTaskStarted(fileName);
List<IngestModuleError> errors = new ArrayList<>();
errors.addAll(pipeline.performTask(task));
if (!errors.isEmpty()) {
logIngestModuleErrors(errors, file);
}
if (usingNetBeansGUI && !jobCancelled) {
synchronized (fileIngestProgressLock) {
/**
* Update the file ingest progress bar again, in
* case the file was being displayed.
*/
filesInProgress.remove(file.getName());
if (filesInProgress.size() > 0) {
fileIngestProgressBar.progress(filesInProgress.get(0));
} else {
fileIngestProgressBar.progress("");
}
}
}
updateFileProgressBarForFileTaskCompleted(fileName);
}
fileIngestPipelinesQueue.put(pipeline);
}
} catch (InterruptedException ex) {
logger.log(Level.SEVERE, String.format("Unexpected interrupt of file ingest thread during execution of file ingest job (file obj ID = %d)", task.getFileId()), ex);
logger.log(Level.SEVERE, String.format("Unexpected interrupt of file ingest thread during execution of file ingest job (file object ID = %d, thread ID = %d)", task.getFileId(), task.getThreadId()), ex);
Thread.currentThread().interrupt();
} finally {
taskScheduler.notifyTaskCompleted(task);
@ -1248,100 +1251,196 @@ final class IngestJobExecutor {
/**
* Updates the display name shown on the current data source level ingest
* progress bar for this job.
* progress bar for this job, if the job has not been cancelled.
*
* @param displayName The new display name.
*/
void updateDataSourceIngestProgressBarDisplayName(String displayName) {
if (usingNetBeansGUI && !jobCancelled) {
synchronized (dataSourceIngestProgressLock) {
SwingUtilities.invokeLater(() -> {
if (dataSourceIngestProgressBar != null) {
dataSourceIngestProgressBar.setDisplayName(displayName);
}
}
});
}
}
/**
* Switches the current data source level ingest progress bar to determinate
* mode. This should be called if the total work units to process the data
* source is known.
* mode, if the job has not been cancelled. This should be called if the
* total work units to process the data source is known.
*
* @param workUnits Total number of work units for the processing of the
* data source.
*/
void switchDataSourceIngestProgressBarToDeterminate(int workUnits) {
if (usingNetBeansGUI && !jobCancelled) {
synchronized (dataSourceIngestProgressLock) {
SwingUtilities.invokeLater(() -> {
if (dataSourceIngestProgressBar != null) {
dataSourceIngestProgressBar.switchToDeterminate(workUnits);
}
}
});
}
}
/**
* Switches the current data source level ingest progress bar to
* indeterminate mode. This should be called if the total work units to
* process the data source is unknown.
* indeterminate mode, if the job has not been cancelled. This should be
* called if the total work units to process the data source is unknown.
*/
void switchDataSourceIngestProgressBarToIndeterminate() {
if (usingNetBeansGUI && !jobCancelled) {
synchronized (dataSourceIngestProgressLock) {
SwingUtilities.invokeLater(() -> {
if (dataSourceIngestProgressBar != null) {
dataSourceIngestProgressBar.switchToIndeterminate();
}
}
});
}
}
/**
* Updates the current data source level ingest progress bar with the number
* of work units performed, if in the determinate mode.
* of work units performed, if in the determinate mode, and the job has not
* been cancelled.
*
* @param workUnits Number of work units performed.
*/
void advanceDataSourceIngestProgressBar(int workUnits) {
if (usingNetBeansGUI && !jobCancelled) {
synchronized (dataSourceIngestProgressLock) {
SwingUtilities.invokeLater(() -> {
if (dataSourceIngestProgressBar != null) {
dataSourceIngestProgressBar.progress("", workUnits);
}
}
});
}
}
/**
* Updates the current data source level ingest progress bar with a new task
* name, where the task name is the "subtitle" under the display name.
* name, where the task name is the "subtitle" under the display name, if
* the job has not been cancelled.
*
* @param currentTask The task name.
*/
void advanceDataSourceIngestProgressBar(String currentTask) {
if (usingNetBeansGUI && !jobCancelled) {
synchronized (dataSourceIngestProgressLock) {
SwingUtilities.invokeLater(() -> {
if (dataSourceIngestProgressBar != null) {
dataSourceIngestProgressBar.progress(currentTask);
}
}
});
}
}
/**
* Updates the current data source level ingest progress bar with a new task
* name and the number of work units performed, if in the determinate mode.
* The task name is the "subtitle" under the display name.
* name and the number of work units performed, if in the determinate mode,
* and the job has not been cancelled. The task name is the "subtitle" under
* the display name.
*
* @param currentTask The task name.
* @param workUnits Number of work units performed.
*/
void advanceDataSourceIngestProgressBar(String currentTask, int workUnits) {
if (usingNetBeansGUI && !jobCancelled) {
synchronized (dataSourceIngestProgressLock) {
SwingUtilities.invokeLater(() -> {
if (dataSourceIngestProgressBar != null) {
dataSourceIngestProgressBar.progress(currentTask, workUnits);
}
}
});
}
}
/**
* Switches the file ingest progress bar to determinate mode, using the
* estimated number of files to process as the number of work units.
*/
private void switchFileIngestProgressBarToDeterminate() {
if (usingNetBeansGUI) {
SwingUtilities.invokeLater(() -> {
if (fileIngestProgressBar != null) {
fileIngestProgressBar.switchToDeterminate((int) estimatedFilesToProcess);
}
});
}
}
/**
* Updates the current file ingest progress bar upon start of analysis of a
* file, if the job has not been cancelled.
*
* @param fileName The name of the file.
*/
private void updateFileIngestProgressForFileTaskStarted(String fileName) {
if (usingNetBeansGUI && !jobCancelled) {
SwingUtilities.invokeLater(() -> {
/*
* If processedFiles exceeds estimatedFilesToProcess, i.e., the
* max work units set for the progress bar, the progress bar
* will go into an infinite loop throwing
* IllegalArgumentExceptions in the EDT (NetBeans bug). Also, a
* check-then-act race condition needs to be avoided here. This
* can be done without guarding processedFiles and
* estimatedFilesToProcess with the same lock because
* estimatedFilesToProcess does not change after it is used to
* switch the progress bar to determinate mode.
*/
long processedFilesCapture = processedFiles;
if (processedFilesCapture <= estimatedFilesToProcess) {
fileIngestProgressBar.progress(fileName, (int) processedFilesCapture);
} else {
fileIngestProgressBar.progress(fileName, (int) estimatedFilesToProcess);
}
filesInProgress.add(fileName);
});
}
}
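// Illustrative of the capture above: reading the volatile processedFiles once
// into a local ensures the bounds check and the progress() call use the same
// value, even if another ingest thread increments the counter in between.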
/**
* Updates the current file ingest progress bar upon completion of analysis
* of a file, if the job has not been cancelled.
*
* @param fileName The name of the file.
*/
private void updateFileProgressBarForFileTaskCompleted(String fileName) {
if (usingNetBeansGUI && !jobCancelled) {
SwingUtilities.invokeLater(() -> {
filesInProgress.remove(fileName);
/*
* Display the name of another file in progress, or the empty
* string if there are none.
*/
if (filesInProgress.size() > 0) {
fileIngestProgressBar.progress(filesInProgress.get(0));
} else {
fileIngestProgressBar.progress(""); // NON-NLS
}
});
}
}
/**
* Displays a "cancelling" message on all of the current ingest message
* progress bars.
*/
private void displayCancellingProgressMessage() {
if (usingNetBeansGUI) {
SwingUtilities.invokeLater(() -> {
if (dataSourceIngestProgressBar != null) {
dataSourceIngestProgressBar.setDisplayName(NbBundle.getMessage(getClass(), "IngestJob.progress.dataSourceIngest.initialDisplayName", dataSource.getName()));
dataSourceIngestProgressBar.progress(NbBundle.getMessage(getClass(), "IngestJob.progress.cancelling"));
}
if (fileIngestProgressBar != null) {
fileIngestProgressBar.setDisplayName(NbBundle.getMessage(getClass(), "IngestJob.progress.fileIngest.displayName", dataSource.getName()));
fileIngestProgressBar.progress(NbBundle.getMessage(getClass(), "IngestJob.progress.cancelling"));
}
if (artifactIngestProgressBar != null) {
artifactIngestProgressBar.setDisplayName(NbBundle.getMessage(getClass(), "IngestJob.progress.dataArtifactIngest.displayName", dataSource.getName()));
artifactIngestProgressBar.progress(NbBundle.getMessage(getClass(), "IngestJob.progress.cancelling"));
}
});
}
}
@ -1358,27 +1457,28 @@ final class IngestJobExecutor {
/**
* Rescinds a temporary cancellation of data source level ingest that was
* used to stop a single data source level ingest module for this job.
* used to stop a single data source level ingest module for this job. The
* data source ingest progress bar is reset if the job has not been
* cancelled.
*
* @param moduleDisplayName The display name of the module that was stopped.
*/
void currentDataSourceIngestModuleCancellationCompleted(String moduleDisplayName) {
currentDataSourceIngestModuleCancelled = false;
cancelledDataSourceIngestModules.add(moduleDisplayName);
if (usingNetBeansGUI) {
/**
* A new progress bar must be created because the cancel button of
* the previously constructed component is disabled by NetBeans when
* the user selects the "OK" button of the cancellation confirmation
* dialog popped up by NetBeans when the progress bar cancel button
* is pressed.
*/
synchronized (dataSourceIngestProgressLock) {
if (usingNetBeansGUI && !jobCancelled) {
SwingUtilities.invokeLater(() -> {
/**
* A new progress bar must be created because the cancel button
* of the previously constructed component is disabled by
* NetBeans when the user selects the "OK" button of the
* cancellation confirmation dialog popped up by NetBeans when
* the progress bar cancel button is pressed.
*/
dataSourceIngestProgressBar.finish();
dataSourceIngestProgressBar = null;
startDataSourceIngestProgressBar();
}
});
}
}
@ -1404,32 +1504,20 @@ final class IngestJobExecutor {
}
/**
* Requests cancellation of ingest, i.e., a shutdown of the data source
* level and file level ingest pipelines.
* Requests cancellation of the ingest job. All pending ingest tasks for the
* job will be cancelled, but any tasks already in progress in ingest
* threads will run to completion. This could take a while if the ingest
* modules executing the tasks are not checking the ingest job cancellation
* flag via the ingest job context.
*
* @param reason The cancellation reason.
*/
void cancel(IngestJob.CancellationReason reason) {
jobCancelled = true;
cancellationReason = reason;
displayCancellingProgressMessage();
IngestJobExecutor.taskScheduler.cancelPendingFileTasksForIngestJob(this);
if (usingNetBeansGUI) {
synchronized (dataSourceIngestProgressLock) {
if (dataSourceIngestProgressBar != null) {
dataSourceIngestProgressBar.setDisplayName(NbBundle.getMessage(getClass(), "IngestJob.progress.dataSourceIngest.initialDisplayName", dataSource.getName()));
dataSourceIngestProgressBar.progress(NbBundle.getMessage(getClass(), "IngestJob.progress.cancelling"));
}
}
synchronized (this.fileIngestProgressLock) {
if (null != this.fileIngestProgressBar) {
this.fileIngestProgressBar.setDisplayName(NbBundle.getMessage(getClass(), "IngestJob.progress.fileIngest.displayName", dataSource.getName()));
this.fileIngestProgressBar.progress(NbBundle.getMessage(getClass(), "IngestJob.progress.cancelling"));
}
}
}
synchronized (threadRegistrationLock) {
for (Thread thread : pausedIngestThreads) {
thread.interrupt();
@ -1437,15 +1525,13 @@ final class IngestJobExecutor {
pausedIngestThreads.clear();
}
/*
* If a data source had no tasks in progress, it may now be complete.
*/
checkForStageCompleted();
}
/**
* Queries whether or not cancellation, i.e., a shut down of the data source
* level and file level ingest pipelines for this job, has been requested.
* Queries whether or not cancellation of the ingest job has been requested.
* Ingest modules executing ingest tasks for this job should check this flag
* frequently via the ingest job context.
*
* @return True if cancellation has been requested, false otherwise.
*/
@ -1454,9 +1540,9 @@ final class IngestJobExecutor {
}
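    /*
     * Illustrative sketch, not part of this change: a long-running file
     * ingest module polling the cancellation flag through its
     * IngestJobContext, as the javadoc above recommends. The module class
     * and the analyze() helper are hypothetical; fileIngestIsCancelled() is
     * the context method for file level ingest.
     */
    public class ExampleFileIngestModule implements FileIngestModule {

        private IngestJobContext context;

        @Override
        public void startUp(IngestJobContext context) throws IngestModuleException {
            this.context = context;
        }

        @Override
        public ProcessResult process(AbstractFile file) {
            long offset = 0;
            while (offset < file.getSize()) {
                if (context.fileIngestIsCancelled()) {
                    return ProcessResult.OK; // stop promptly; the job is shutting down
                }
                offset += analyze(file, offset); // hypothetical helper doing the real work
            }
            return ProcessResult.OK;
        }

        @Override
        public void shutDown() {
        }
    }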
/**
* Gets the reason this job was cancelled.
* If the ingest job was cancelled, gets the reason this job was cancelled.
*
* @return The cancellation reason, may be not cancelled.
* @return The cancellation reason, may be "not cancelled."
*/
IngestJob.CancellationReason getCancellationReason() {
return cancellationReason;
@ -1469,7 +1555,7 @@ final class IngestJobExecutor {
* @param message The message.
*/
private void logInfoMessage(String message) {
logger.log(Level.INFO, String.format("%s (data source = %s, object Id = %d, job id = %d)", message, dataSource.getName(), dataSource.getId(), getIngestJobId())); //NON-NLS
logger.log(Level.INFO, String.format("%s (data source = %s, data source object Id = %d, job id = %d)", message, dataSource.getName(), dataSource.getId(), getIngestJobId())); //NON-NLS
}
/**
@ -1481,7 +1567,7 @@ final class IngestJobExecutor {
* @param throwable The throwable associated with the error.
*/
private void logErrorMessage(Level level, String message, Throwable throwable) {
logger.log(level, String.format("%s (data source = %s, object Id = %d, ingest job id = %d)", message, dataSource.getName(), dataSource.getId(), getIngestJobId()), throwable); //NON-NLS
logger.log(level, String.format("%s (data source = %s, data source object Id = %d, ingest job id = %d)", message, dataSource.getName(), dataSource.getId(), getIngestJobId()), throwable); //NON-NLS
}
/**
@ -1492,7 +1578,7 @@ final class IngestJobExecutor {
* @param message The message.
*/
private void logErrorMessage(Level level, String message) {
logger.log(level, String.format("%s (data source = %s, object Id = %d, ingest job id %d)", message, dataSource.getName(), dataSource.getId(), getIngestJobId())); //NON-NLS
logger.log(level, String.format("%s (data source = %s, data source object Id = %d, ingest job id %d)", message, dataSource.getName(), dataSource.getId(), getIngestJobId())); //NON-NLS
}
/**
@ -1514,7 +1600,7 @@ final class IngestJobExecutor {
*/
private void logIngestModuleErrors(List<IngestModuleError> errors, AbstractFile file) {
for (IngestModuleError error : errors) {
logErrorMessage(Level.SEVERE, String.format("%s experienced an error during analysis while processing file %s, object ID %d", error.getModuleDisplayName(), file.getName(), file.getId()), error.getThrowable()); //NON-NLS
logErrorMessage(Level.SEVERE, String.format("%s experienced an error during analysis while processing file %s (object ID = %d)", error.getModuleDisplayName(), file.getName(), file.getId()), error.getThrowable()); //NON-NLS
}
}
@ -1549,20 +1635,25 @@ final class IngestJobExecutor {
long snapShotTime = new Date().getTime();
IngestJobTasksSnapshot tasksSnapshot = null;
if (includeIngestTasksSnapshot) {
synchronized (fileIngestProgressLock) {
processedFilesCount = processedFiles;
estimatedFilesToProcessCount = estimatedFilesToProcess;
snapShotTime = new Date().getTime();
}
processedFilesCount = processedFiles;
estimatedFilesToProcessCount = estimatedFilesToProcess;
snapShotTime = new Date().getTime();
tasksSnapshot = taskScheduler.getTasksSnapshotForJob(getIngestJobId());
}
return new Snapshot(dataSource.getName(),
getIngestJobId(), createTime,
return new Snapshot(
dataSource.getName(),
getIngestJobId(),
createTime,
getCurrentDataSourceIngestModule(),
fileIngestRunning, fileIngestStartTime,
jobCancelled, cancellationReason, cancelledDataSourceIngestModules,
processedFilesCount, estimatedFilesToProcessCount, snapShotTime, tasksSnapshot);
fileIngestRunning,
fileIngestStartTime,
jobCancelled,
cancellationReason,
cancelledDataSourceIngestModules,
processedFilesCount,
estimatedFilesToProcessCount,
snapShotTime,
tasksSnapshot);
}
/**

View File

@ -67,7 +67,7 @@ class IngestJobInputStream implements IngestStream {
@Override
public synchronized void close() {
closed = true;
ingestJob.processStreamingIngestDataSource();
ingestJob.addStreamedDataSource();
}
@Override

View File

@ -1050,7 +1050,7 @@ public class IngestManager implements IngestProgressSnapshotProvider {
}
/**
* Creates and starts an ingest job for a collection of data sources.
* Creates and starts an ingest job.
*/
private final class StartIngestJobTask implements Callable<Void> {

View File

@ -60,6 +60,11 @@ public interface IngestModule {
* must also be taken into consideration when sharing resources between
* module instances. See IngestModuleReferenceCounter.
*
* IMPORTANT: Startup IngestModuleException messages are displayed to the
* user, if a user is present. Therefore, this method is an appropriate
* exception to the policy that exception messages are not localized, and
* the exception messages should be user-friendly.
*
* @param context Provides data and services specific to the ingest job and
* the ingest pipeline of which the module is a part.
*
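/*
 * Illustrative sketch, not part of this change: a startUp implementation
 * that fails with a localized, user-friendly message, per the policy note
 * above. The enclosing module class, the settings object, and the bundle
 * key are hypothetical.
 */
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
    if (settings.getServerUrl() == null) { // hypothetical settings object
        // Shown to the user in a dialog, so the message is localized and
        // phrased for end users rather than for the log.
        throw new IngestModuleException(
                NbBundle.getMessage(this.getClass(), "ExampleModule.startUp.missingServerUrl"));
    }
}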

View File

@ -49,7 +49,7 @@ public final class RunIngestModulesAction extends AbstractAction {
@Messages("RunIngestModulesAction.name=Run Ingest Modules")
private static final long serialVersionUID = 1L;
private static final Logger logger = Logger.getLogger(SpecialDirectoryNode.class.getName());
private static final Logger logger = Logger.getLogger(RunIngestModulesAction.class.getName());
/*
* Note that the execution context is the name of the dialog that used to be

View File

@ -0,0 +1,66 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel;
import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent;
import java.beans.PropertyChangeEvent;
import java.util.Set;
import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent;
/**
* Internal methods that DAOs implement.
*/
abstract class AbstractDAO {
/**
* Clears any cached data (due to a change in view).
*/
abstract void clearCaches();
/**
* Handles an Autopsy event (i.e., ingest, case, etc.). This method is
* responsible for clearing internal caches that are affected by the event
* and returning one or more DAOEvents that should be broadcast to the
* views.
*
* @param evt The Autopsy event that recently came in from Ingest/Case.
*
* @return The list of DAOEvents that should be broadcast to the views or
* an empty list if the Autopsy events are irrelevant to this DAO.
*/
abstract Set<? extends DAOEvent> processEvent(PropertyChangeEvent evt);
/**
* Handles the ingest complete or cancelled event. Any events that are
* delayed or batched are flushed and returned.
*
* @return The flushed events that were delayed and batched.
*/
abstract Set<? extends DAOEvent> handleIngestComplete();
/**
* Returns any categories that require a tree refresh. For instance, if web
* cache and web bookmarks haven't been updated recently, and are currently
* set to an indeterminate amount (i.e. "..."), then broadcast an event
* forcing the tree to update to a determinate count.
*
* @return The categories that require a tree refresh.
*/
abstract Set<? extends TreeEvent> shouldRefreshTree();
}
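/*
 * Illustrative sketch, not part of this change: a minimal AbstractDAO
 * subclass honoring the contract above. ExampleDAO and its cache key type
 * are hypothetical; the Guava cache and TreeCounts fields mirror the
 * concrete DAOs in this package.
 */
class ExampleDAO extends AbstractDAO {

    private final Cache<SearchParams<?>, SearchResultsDTO> cache =
            CacheBuilder.newBuilder().maximumSize(1000).build();
    private final TreeCounts<DAOEvent> treeCounts = new TreeCounts<>();

    @Override
    void clearCaches() {
        // Drop everything cached and flush any pending tree counts.
        cache.invalidateAll();
        handleIngestComplete();
    }

    @Override
    Set<? extends DAOEvent> processEvent(PropertyChangeEvent evt) {
        // Digest the Autopsy event, invalidate affected cache entries, and
        // return the DAOEvents to broadcast; empty set if irrelevant.
        return Collections.emptySet();
    }

    @Override
    Set<? extends DAOEvent> handleIngestComplete() {
        // Flush delayed/batched events now that ingest is done or cancelled.
        return Collections.emptySet();
    }

    @Override
    Set<? extends TreeEvent> shouldRefreshTree() {
        // Promote stale indeterminate tree counts to a full refresh.
        return Collections.emptySet();
    }
}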

View File

@ -18,27 +18,42 @@
*/
package org.sleuthkit.autopsy.mainui.datamodel;
import org.sleuthkit.autopsy.mainui.datamodel.events.AnalysisResultSetEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.AnalysisResultEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.beans.PropertyChangeEvent;
import java.sql.SQLException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.lang3.tuple.Pair;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.python.google.common.collect.ImmutableSet;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeDisplayCount;
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO;
import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventUtils;
import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.TreeCounts;
import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.AnalysisResult;
@ -64,12 +79,6 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO {
private static Logger logger = Logger.getLogger(AnalysisResultDAO.class.getName());
// rule of thumb: 10 entries times number of cached SearchParams sub-types (BlackboardArtifactSearchParam, AnalysisResultSetSearchParam, KeywordHitSearchParam)
private static final int CACHE_SIZE = 30;
private static final long CACHE_DURATION = 2;
private static final TimeUnit CACHE_DURATION_UNITS = TimeUnit.MINUTES;
private final Cache<SearchParams<?>, AnalysisResultTableSearchResultsDTO> searchParamsCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).expireAfterAccess(CACHE_DURATION, CACHE_DURATION_UNITS).build();
private static AnalysisResultDAO instance = null;
@NbBundle.Messages({
@ -133,6 +142,21 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO {
return BlackboardArtifactDAO.getIgnoredTreeTypes();
}
@SuppressWarnings("deprecation")
private static final Set<Integer> STANDARD_SET_TYPES = ImmutableSet.of(
BlackboardArtifact.Type.TSK_INTERESTING_ITEM.getTypeID(),
BlackboardArtifact.Type.TSK_INTERESTING_ARTIFACT_HIT.getTypeID(),
BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT.getTypeID(),
BlackboardArtifact.Type.TSK_HASHSET_HIT.getTypeID()
);
// TODO We can probably combine all the caches at some point
private final Cache<SearchParams<BlackboardArtifactSearchParam>, AnalysisResultTableSearchResultsDTO> analysisResultCache = CacheBuilder.newBuilder().maximumSize(1000).build();
private final Cache<SearchParams<AnalysisResultSetSearchParam>, AnalysisResultTableSearchResultsDTO> setHitCache = CacheBuilder.newBuilder().maximumSize(1000).build();
private final Cache<SearchParams<KeywordHitSearchParam>, AnalysisResultTableSearchResultsDTO> keywordHitCache = CacheBuilder.newBuilder().maximumSize(1000).build();
private final TreeCounts<AnalysisResultEvent> treeCounts = new TreeCounts<>();
private AnalysisResultTableSearchResultsDTO fetchAnalysisResultsForTable(SearchParams<BlackboardArtifactSearchParam> cacheKey) throws NoCurrentCaseException, TskCoreException {
SleuthkitCase skCase = getCase();
@ -236,7 +260,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO {
return new AnalysisResultRowDTO((AnalysisResult) artifact, srcContent, isTimelineSupported, cellValues, id);
}
public AnalysisResultTableSearchResultsDTO getAnalysisResultsForTable(AnalysisResultSearchParam artifactKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException {
public AnalysisResultTableSearchResultsDTO getAnalysisResultsForTable(AnalysisResultSearchParam artifactKey, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException {
BlackboardArtifact.Type artType = artifactKey.getArtifactType();
if (artType == null || artType.getCategory() != BlackboardArtifact.Category.ANALYSIS_RESULT
@ -247,18 +271,31 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO {
}
SearchParams<BlackboardArtifactSearchParam> searchParams = new SearchParams<>(artifactKey, startItem, maxCount);
if (hardRefresh) {
searchParamsCache.invalidate(searchParams);
return analysisResultCache.get(searchParams, () -> fetchAnalysisResultsForTable(searchParams));
}
private boolean isAnalysisResultsInvalidating(AnalysisResultSearchParam key, DAOEvent eventData) {
if (!(eventData instanceof AnalysisResultEvent)) {
return false;
}
return searchParamsCache.get(searchParams, () -> fetchAnalysisResultsForTable(searchParams));
AnalysisResultEvent analysisResultEvt = (AnalysisResultEvent) eventData;
return key.getArtifactType().getTypeID() == analysisResultEvt.getArtifactType().getTypeID()
&& (key.getDataSourceId() == null || key.getDataSourceId() == analysisResultEvt.getDataSourceId());
}
public boolean isAnalysisResultsInvalidating(AnalysisResultSearchParam key, ModuleDataEvent eventData) {
return key.getArtifactType().equals(eventData.getBlackboardArtifactType());
private boolean isAnalysisResultsSetInvalidating(AnalysisResultSetSearchParam key, DAOEvent event) {
if (!(event instanceof AnalysisResultSetEvent)) {
return false;
}
AnalysisResultSetEvent setEvent = (AnalysisResultSetEvent) event;
return isAnalysisResultsInvalidating((AnalysisResultSearchParam) key, (AnalysisResultEvent) setEvent)
&& Objects.equals(key.getSetName(), setEvent.getSetName());
}
public AnalysisResultTableSearchResultsDTO getAnalysisResultSetHits(AnalysisResultSetSearchParam artifactKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException {
// GVDTODO handle keyword hits
public AnalysisResultTableSearchResultsDTO getAnalysisResultSetHits(AnalysisResultSetSearchParam artifactKey, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException {
if (artifactKey.getDataSourceId() != null && artifactKey.getDataSourceId() < 0) {
throw new IllegalArgumentException(MessageFormat.format("Illegal data. "
+ "Data source id must be null or > 0. "
@ -266,14 +303,10 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO {
}
SearchParams<AnalysisResultSetSearchParam> searchParams = new SearchParams<>(artifactKey, startItem, maxCount);
if (hardRefresh) {
searchParamsCache.invalidate(searchParams);
}
return searchParamsCache.get(searchParams, () -> fetchKeywordHitsForTable(searchParams));
return setHitCache.get(searchParams, () -> fetchKeywordHitsForTable(searchParams));
}
public AnalysisResultTableSearchResultsDTO getKeywordHitsForTable(KeywordHitSearchParam artifactKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException {
public AnalysisResultTableSearchResultsDTO getKeywordHitsForTable(KeywordHitSearchParam artifactKey, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException {
if (artifactKey.getDataSourceId() != null && artifactKey.getDataSourceId() < 0) {
throw new IllegalArgumentException(MessageFormat.format("Illegal data. "
+ "Data source id must be null or > 0. "
@ -281,11 +314,19 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO {
}
SearchParams<KeywordHitSearchParam> searchParams = new SearchParams<>(artifactKey, startItem, maxCount);
if (hardRefresh) {
searchParamsCache.invalidate(searchParams);
}
return keywordHitCache.get(searchParams, () -> fetchKeywordHitsForTable(searchParams));
}
return searchParamsCache.get(searchParams, () -> fetchKeywordHitsForTable(searchParams));
public void dropAnalysisResultCache() {
analysisResultCache.invalidateAll();
}
public void dropHashHitCache() {
setHitCache.invalidateAll();
}
public void dropKeywordHitCache() {
keywordHitCache.invalidateAll();
}
/**
@ -304,14 +345,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO {
// get row dto's sorted by display name
Map<BlackboardArtifact.Type, Long> typeCounts = getCounts(BlackboardArtifact.Category.ANALYSIS_RESULT, dataSourceId);
List<TreeResultsDTO.TreeItemDTO<AnalysisResultSearchParam>> treeItemRows = typeCounts.entrySet().stream()
.map(entry -> {
return new TreeResultsDTO.TreeItemDTO<>(
BlackboardArtifact.Category.ANALYSIS_RESULT.name(),
new AnalysisResultSearchParam(entry.getKey(), dataSourceId),
entry.getKey().getTypeID(),
entry.getKey().getDisplayName(),
entry.getValue());
})
.map(entry -> getTreeItem(entry.getKey(), dataSourceId, TreeDisplayCount.getDeterminate(entry.getValue())))
.sorted(Comparator.comparing(countRow -> countRow.getDisplayName()))
.collect(Collectors.toList());
@ -323,6 +357,15 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO {
}
}
private TreeItemDTO<AnalysisResultSearchParam> getTreeItem(BlackboardArtifact.Type type, Long dataSourceId, TreeDisplayCount displayCount) {
return new TreeItemDTO<>(
BlackboardArtifact.Category.ANALYSIS_RESULT.name(),
new AnalysisResultSearchParam(type, dataSourceId),
type.getTypeID(),
type.getDisplayName(),
displayCount);
}
/**
*
* @param type The artifact type to filter on.
@ -404,18 +447,28 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO {
.filter(entry -> nullSetName != null || entry.getKey() != null)
.sorted((a, b) -> compareSetStrings(a.getKey(), b.getKey()))
.map(entry -> {
return new TreeItemDTO<>(
type.getTypeName(),
new AnalysisResultSetSearchParam(type, dataSourceId, entry.getKey()),
entry.getKey() == null ? 0 : entry.getKey(),
return getSetTreeItem(type,
dataSourceId,
entry.getKey(),
entry.getKey() == null ? nullSetName : entry.getKey(),
entry.getValue());
TreeDisplayCount.getDeterminate(entry.getValue()));
})
.collect(Collectors.toList());
return new TreeResultsDTO<>(allSets);
}
private TreeItemDTO<AnalysisResultSetSearchParam> getSetTreeItem(BlackboardArtifact.Type type,
Long dataSourceId, String setName, String displayName, TreeDisplayCount displayCount) {
return new TreeItemDTO<>(
type.getTypeName(),
new AnalysisResultSetSearchParam(type, dataSourceId, setName),
setName == null ? 0 : setName,
displayName,
displayCount);
}
/**
* Compares set strings to properly order for the tree.
*
@ -547,13 +600,15 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO {
break;
}
items.add(new TreeItemDTO<>(
TreeItemDTO<KeywordSearchTermParams> treeItem = new TreeItemDTO<>(
"KEYWORD_SEARCH_TERMS",
new KeywordSearchTermParams(setName, searchTerm, TskData.KeywordSearchQueryType.valueOf(searchType), hasChildren, dataSourceId),
searchTermModified,
searchTermModified,
count
));
TreeDisplayCount.getDeterminate(count)
);
items.add(treeItem);
}
} catch (SQLException ex) {
logger.log(Level.WARNING, "An error occurred while fetching results from result set.", ex);
@ -642,7 +697,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO {
new KeywordMatchParams(setName, regexStr, keyword, searchType, dataSourceId),
keyword,
keyword == null ? "" : keyword,
count));
TreeDisplayCount.getDeterminate(count)));
}
} catch (SQLException ex) {
logger.log(Level.WARNING, "An error occurred while fetching results from result set.", ex);
@ -655,35 +710,163 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO {
}
}
/**
* Handles basic functionality of fetching and paging of analysis results.
*/
static abstract class AbstractAnalysisResultFetcher<T extends AnalysisResultSearchParam> extends DAOFetcher<T> {
@Override
void clearCaches() {
this.analysisResultCache.invalidateAll();
this.keywordHitCache.invalidateAll();
this.setHitCache.invalidateAll();
this.handleIngestComplete();
}
/**
* Main constructor.
*
* @param params Parameters to handle fetching of data.
*/
public AbstractAnalysisResultFetcher(T params) {
super(params);
}
@Override
Set<DAOEvent> processEvent(PropertyChangeEvent evt) {
// get a grouping of artifacts mapping the artifact type id to data source id.
Map<BlackboardArtifact.Type, Set<Long>> analysisResultMap = new HashMap<>();
Map<Pair<BlackboardArtifact.Type, String>, Set<Long>> setMap = new HashMap<>();
Map<KeywordMatchParams, Set<Long>> keywordHitsMap = new HashMap<>();
@Override
public boolean isRefreshRequired(PropertyChangeEvent evt) {
ModuleDataEvent dataEvent = this.getModuleDataFromEvt(evt);
if (dataEvent == null) {
return false;
ModuleDataEvent dataEvt = DAOEventUtils.getModuelDataFromArtifactEvent(evt);
if (dataEvt != null) {
for (BlackboardArtifact art : dataEvt.getArtifacts()) {
try {
if (art.getArtifactTypeID() == BlackboardArtifact.Type.TSK_KEYWORD_HIT.getTypeID()) {
// GVDTODO handle keyword hits
} else if (STANDARD_SET_TYPES.contains(art.getArtifactTypeID())) {
BlackboardAttribute setAttr = art.getAttribute(BlackboardAttribute.Type.TSK_SET_NAME);
String setName = setAttr == null ? null : setAttr.getValueString();
setMap.computeIfAbsent(Pair.of(art.getType(), setName), (k) -> new HashSet<>())
.add(art.getDataSourceObjectID());
} else if (BlackboardArtifact.Category.ANALYSIS_RESULT.equals(art.getType().getCategory())) {
analysisResultMap.computeIfAbsent(art.getType(), (k) -> new HashSet<>())
.add(art.getDataSourceObjectID());
}
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Unable to fetch necessary information for artifact id: " + art.getId(), ex);
}
}
return MainDAO.getInstance().getAnalysisResultDAO().isAnalysisResultsInvalidating(this.getParameters(), dataEvent);
}
// don't continue if no relevant items found
if (analysisResultMap.isEmpty() && setMap.isEmpty() && keywordHitsMap.isEmpty()) {
return Collections.emptySet();
}
clearRelevantCacheEntries(analysisResultMap, setMap);
List<AnalysisResultEvent> daoEvents = getResultViewEvents(analysisResultMap, setMap);
Collection<TreeEvent> treeEvents = this.treeCounts.enqueueAll(daoEvents).stream()
.map(arEvt -> getTreeEvent(arEvt, false))
.collect(Collectors.toList());
return Stream.of(daoEvents, treeEvents)
.flatMap(lst -> lst.stream())
.collect(Collectors.toSet());
}
/**
* Generates result view events from a digest of Autopsy events.
*
* @param analysisResultMap Contains the analysis results that do not use a
* set name. A mapping of analysis result type ids
* to data sources where the results were created.
* @param resultsWithSetMap Contains the analysis results that do use a set
* name. A mapping of (analysis result type id, set
* name) to data sources where results were
* created.
*
* @return The list of dao events.
*/
private List<AnalysisResultEvent> getResultViewEvents(Map<BlackboardArtifact.Type, Set<Long>> analysisResultMap, Map<Pair<BlackboardArtifact.Type, String>, Set<Long>> resultsWithSetMap) {
Stream<AnalysisResultEvent> analysisResultEvts = analysisResultMap.entrySet().stream()
.flatMap(entry -> entry.getValue().stream().map(dsId -> new AnalysisResultEvent(entry.getKey(), dsId)));
Stream<AnalysisResultEvent> analysisResultSetEvts = resultsWithSetMap.entrySet().stream()
.flatMap(entry -> entry.getValue().stream().map(dsId -> new AnalysisResultSetEvent(entry.getKey().getRight(), entry.getKey().getLeft(), dsId)));
// GVDTODO handle keyword hits
return Stream.of(analysisResultEvts, analysisResultSetEvts)
.flatMap(s -> s)
.collect(Collectors.toList());
}
/**
* Clears cache entries given the provided digests of Autopsy events.
*
* @param analysisResultMap Contains the analysis results that do not use a
* set name. A mapping of analysis result type ids
* to data sources where the results were created.
* @param resultsWithSetMap Contains the analysis results that do use a set
* name. A mapping of (analysis result type id, set
* name) to data sources where results were
* created.
*/
private void clearRelevantCacheEntries(Map<BlackboardArtifact.Type, Set<Long>> analysisResultMap, Map<Pair<BlackboardArtifact.Type, String>, Set<Long>> resultsWithSetMap) {
ConcurrentMap<SearchParams<BlackboardArtifactSearchParam>, AnalysisResultTableSearchResultsDTO> arConcurrentMap = this.analysisResultCache.asMap();
arConcurrentMap.forEach((k, v) -> {
BlackboardArtifactSearchParam searchParam = k.getParamData();
Set<Long> dsIds = analysisResultMap.get(searchParam.getArtifactType());
if (dsIds != null && (searchParam.getDataSourceId() == null || dsIds.contains(searchParam.getDataSourceId()))) {
arConcurrentMap.remove(k);
}
});
ConcurrentMap<SearchParams<AnalysisResultSetSearchParam>, AnalysisResultTableSearchResultsDTO> setConcurrentMap = this.setHitCache.asMap();
setConcurrentMap.forEach((k, v) -> {
AnalysisResultSetSearchParam searchParam = k.getParamData();
Set<Long> dsIds = resultsWithSetMap.get(Pair.of(searchParam.getArtifactType(), searchParam.getSetName()));
if (dsIds != null && (searchParam.getDataSourceId() == null || dsIds.contains(searchParam.getDataSourceId()))) {
setConcurrentMap.remove(k);
}
});
// GVDTODO handle clearing cache for keyword search hits
// private final Cache<SearchParams<KeywordHitSearchParam>, AnalysisResultTableSearchResultsDTO> keywordHitCache = CacheBuilder.newBuilder().maximumSize(1000).build();
}
/**
* Creates a TreeEvent instance based on the analysis result event and
* whether or not this event should trigger a full refresh of counts.
*
* @param arEvt The analysis result event.
* @param shouldRefresh Whether or not this tree event should trigger a full
* refresh of counts.
*
* @return The tree event.
*/
private TreeEvent getTreeEvent(AnalysisResultEvent arEvt, boolean shouldRefresh) {
// GVDTODO handle keyword items when integrated
if (arEvt instanceof AnalysisResultSetEvent) {
AnalysisResultSetEvent setEvt = (AnalysisResultSetEvent) arEvt;
return new TreeEvent(getSetTreeItem(setEvt.getArtifactType(), setEvt.getDataSourceId(),
setEvt.getSetName(), setEvt.getSetName() == null ? "" : setEvt.getSetName(),
shouldRefresh ? TreeDisplayCount.UNSPECIFIED : TreeDisplayCount.INDETERMINATE),
shouldRefresh);
} else {
return new TreeEvent(getTreeItem(arEvt.getArtifactType(), arEvt.getDataSourceId(),
shouldRefresh ? TreeDisplayCount.UNSPECIFIED : TreeDisplayCount.INDETERMINATE),
shouldRefresh);
}
}
@Override
Set<DAOEvent> handleIngestComplete() {
return this.treeCounts.flushEvents().stream()
.map(arEvt -> getTreeEvent(arEvt, true))
.collect(Collectors.toSet());
}
@Override
Set<TreeEvent> shouldRefreshTree() {
return this.treeCounts.getEventTimeouts().stream()
.map(arEvt -> getTreeEvent(arEvt, true))
.collect(Collectors.toSet());
}
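    /*
     * A note on the TreeCounts lifecycle used above, summarized from this
     * file:
     *
     *   treeCounts.enqueueAll(daoEvents)   // new results arrive; affected tree
     *                                      // nodes display an indeterminate count
     *   treeCounts.getEventTimeouts()      // stale entries time out periodically;
     *                                      // shouldRefreshTree() turns them into
     *                                      // full-refresh TreeEvents
     *   treeCounts.flushEvents()           // ingest completes or is cancelled;
     *                                      // handleIngestComplete() flushes the rest
     */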
/**
* Handles fetching and paging of analysis results.
*/
public static class AnalysisResultFetcher extends AbstractAnalysisResultFetcher<AnalysisResultSearchParam> {
public static class AnalysisResultFetcher extends DAOFetcher<AnalysisResultSearchParam> {
/**
* Main constructor.
@ -694,16 +877,25 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO {
super(params);
}
protected AnalysisResultDAO getDAO() {
return MainDAO.getInstance().getAnalysisResultDAO();
}
@Override
public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException {
return MainDAO.getInstance().getAnalysisResultDAO().getAnalysisResultsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh);
public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException {
return getDAO().getAnalysisResultsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize);
}
@Override
public boolean isRefreshRequired(DAOEvent evt) {
return getDAO().isAnalysisResultsInvalidating(this.getParameters(), evt);
}
}
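    /*
     * Illustrative usage sketch, not part of this change: paging analysis
     * results with the fetcher. The artifact type and page size are
     * arbitrary example values, and ExecutionException must be handled by
     * the caller.
     */
    AnalysisResultFetcher fetcher = new AnalysisResultFetcher(
            new AnalysisResultSearchParam(BlackboardArtifact.Type.TSK_YARA_HIT, null));
    // First page of 50 rows; served from the DAO cache when the key is warm.
    SearchResultsDTO firstPage = fetcher.getSearchResults(50, 0);
    // Later, decide whether a broadcast DAOEvent invalidates this view.
    boolean stale = fetcher.isRefreshRequired(someDaoEvent); // someDaoEvent assumed in scope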
/**
* Handles fetching and paging of hashset hits.
*/
public static class AnalysisResultSetFetcher extends AbstractAnalysisResultFetcher<AnalysisResultSetSearchParam> {
public static class AnalysisResultSetFetcher extends DAOFetcher<AnalysisResultSetSearchParam> {
/**
* Main constructor.
@ -714,16 +906,25 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO {
super(params);
}
protected AnalysisResultDAO getDAO() {
return MainDAO.getInstance().getAnalysisResultDAO();
}
@Override
public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException {
return MainDAO.getInstance().getAnalysisResultDAO().getAnalysisResultSetHits(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh);
public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException {
return getDAO().getAnalysisResultSetHits(this.getParameters(), pageIdx * pageSize, (long) pageSize);
}
@Override
public boolean isRefreshRequired(DAOEvent evt) {
return getDAO().isAnalysisResultsSetInvalidating(this.getParameters(), evt);
}
}
/**
* Handles fetching and paging of keyword hits.
*/
public static class KeywordHitResultFetcher extends AbstractAnalysisResultFetcher<KeywordHitSearchParam> {
public static class KeywordHitResultFetcher extends DAOFetcher<KeywordHitSearchParam> {
/**
* Main constructor.
@ -734,9 +935,19 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO {
super(params);
}
protected AnalysisResultDAO getDAO() {
return MainDAO.getInstance().getAnalysisResultDAO();
}
@Override
public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException {
return MainDAO.getInstance().getAnalysisResultDAO().getKeywordHitsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh);
public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException {
return getDAO().getKeywordHitsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize);
}
@Override
public boolean isRefreshRequired(DAOEvent evt) {
// GVDTODO
return true;
}
}
}

View File

@ -27,11 +27,12 @@ import org.sleuthkit.datamodel.BlackboardArtifact;
public class AnalysisResultTableSearchResultsDTO extends BaseSearchResultsDTO {
private static final String TYPE_ID = "ANALYSIS_RESULT";
private static final String SIGNATURE = "analysisresult";
private final BlackboardArtifact.Type artifactType;
public AnalysisResultTableSearchResultsDTO(BlackboardArtifact.Type artifactType, List<ColumnKey> columns, List<RowDTO> items, long startItem, long totalResultsCount) {
super(TYPE_ID, artifactType.getDisplayName(), columns, items, startItem, totalResultsCount);
super(TYPE_ID, artifactType.getDisplayName(), columns, items, SIGNATURE, startItem, totalResultsCount);
this.artifactType = artifactType;
}

View File

@ -31,18 +31,20 @@ public class BaseSearchResultsDTO implements SearchResultsDTO {
private final List<RowDTO> items;
private final long totalResultsCount;
private final long startItem;
private final String signature;
public BaseSearchResultsDTO(String typeId, String displayName, List<ColumnKey> columns, List<RowDTO> items) {
this(typeId, displayName, columns, items, 0, items == null ? 0 : items.size());
public BaseSearchResultsDTO(String typeId, String displayName, List<ColumnKey> columns, List<RowDTO> items, String signature) {
this(typeId, displayName, columns, items, signature, 0, items == null ? 0 : items.size());
}
public BaseSearchResultsDTO(String typeId, String displayName, List<ColumnKey> columns, List<RowDTO> items, long startItem, long totalResultsCount) {
public BaseSearchResultsDTO(String typeId, String displayName, List<ColumnKey> columns, List<RowDTO> items, String signature, long startItem, long totalResultsCount) {
this.typeId = typeId;
this.displayName = displayName;
this.columns = columns;
this.items = items;
this.startItem = startItem;
this.totalResultsCount = totalResultsCount;
this.signature = signature;
}
@Override
@ -74,4 +76,9 @@ public class BaseSearchResultsDTO implements SearchResultsDTO {
public long getStartItem() {
return startItem;
}
@Override
public String getSignature() {
return signature;
}
}
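/*
 * Illustrative sketch, not part of this change: constructing a result set
 * with the new signature argument. The type id, display name, and signature
 * values are hypothetical; columns and rows are assumed built elsewhere.
 */
SearchResultsDTO results = new BaseSearchResultsDTO(
        "EXAMPLE_TYPE",      // type id (hypothetical)
        "Example Results",   // display name (hypothetical)
        columns,             // List<ColumnKey>, assumed built elsewhere
        rows,                // List<RowDTO>, assumed built elsewhere
        "example");          // signature identifying this kind of result set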

View File

@ -11,7 +11,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@ -71,7 +70,7 @@ import org.sleuthkit.datamodel.TskCoreException;
"BlackboardArtifactDAO.columnKeys.dataSource.displayName=Data Source",
"BlackboardArtifactDAO.columnKeys.dataSource.description=Data Source"
})
abstract class BlackboardArtifactDAO {
abstract class BlackboardArtifactDAO extends AbstractDAO {
private static Logger logger = Logger.getLogger(BlackboardArtifactDAO.class.getName());
@ -149,6 +148,7 @@ abstract class BlackboardArtifactDAO {
protected static Set<BlackboardArtifact.Type> getIgnoredTreeTypes() {
return IGNORED_TYPES;
}
TableData createTableData(BlackboardArtifact.Type artType, List<BlackboardArtifact> arts) throws TskCoreException, NoCurrentCaseException {
Map<Long, Map<BlackboardAttribute.Type, Object>> artifactAttributes = new HashMap<>();

View File

@ -34,6 +34,9 @@ BlackboardArtifactDAO.columnKeys.score.name=Score
BlackboardArtifactDAO.columnKeys.srcFile.description=Source Name
BlackboardArtifactDAO.columnKeys.srcFile.displayName=Source Name
BlackboardArtifactDAO.columnKeys.srcFile.name=Source Name
CommAccounts.name.text=Communication Accounts
CommAccountsDAO.fileColumns.noDescription=No Description
DataArtifactDAO_Accounts_displayName=Communication Accounts
FileExtDocumentFilter_html_displayName=HTML
FileExtDocumentFilter_office_displayName=Office
FileExtDocumentFilter_pdf_displayName=PDF
@ -71,6 +74,8 @@ FileSystemColumnUtils.abstractFileColumns.sizeColLbl=Size
FileSystemColumnUtils.abstractFileColumns.typeDirColLbl=Type(Dir)
FileSystemColumnUtils.abstractFileColumns.typeMetaColLbl=Type(Meta)
FileSystemColumnUtils.abstractFileColumns.useridColLbl=UserID
FileSystemColumnUtils.getContentName.dotDir=[current folder]
FileSystemColumnUtils.getContentName.dotDotDir=[parent folder]
FileSystemColumnUtils.imageColumns.devID=Device ID
FileSystemColumnUtils.imageColumns.sectorSize=Sector Size (Bytes)
FileSystemColumnUtils.imageColumns.size=Size (Bytes)

View File

@ -22,18 +22,33 @@ import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.beans.PropertyChangeEvent;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.lang3.tuple.Pair;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeDisplayCount;
import org.sleuthkit.autopsy.mainui.datamodel.events.CommAccountsEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventUtils;
import org.sleuthkit.autopsy.mainui.datamodel.events.TreeCounts;
import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent;
import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher;
import org.sleuthkit.datamodel.Account;
import org.sleuthkit.datamodel.Blackboard;
@ -43,16 +58,18 @@ import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
/**
* Provides information to populate the results viewer for data in the
* Provides information to populate the results viewer for data in the
* Communication Accounts section.
*/
@Messages({"CommAccountsDAO.fileColumns.noDescription=No Description"})
public class CommAccountsDAO {
public class CommAccountsDAO extends AbstractDAO {
private static final Logger logger = Logger.getLogger(CommAccountsDAO.class.getName());
private static final int CACHE_SIZE = Account.Type.PREDEFINED_ACCOUNT_TYPES.size(); // number of cached SearchParams sub-types
private static final long CACHE_DURATION = 2;
private static final TimeUnit CACHE_DURATION_UNITS = TimeUnit.MINUTES;
private final Cache<SearchParams<?>, SearchResultsDTO> searchParamsCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).expireAfterAccess(CACHE_DURATION, CACHE_DURATION_UNITS).build();
private static final TimeUnit CACHE_DURATION_UNITS = TimeUnit.MINUTES;
private final Cache<SearchParams<CommAccountsSearchParams>, SearchResultsDTO> searchParamsCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).expireAfterAccess(CACHE_DURATION, CACHE_DURATION_UNITS).build();
private final TreeCounts<CommAccountsEvent> accountCounts = new TreeCounts<>();
private static CommAccountsDAO instance = null;
@ -64,20 +81,20 @@ public class CommAccountsDAO {
return instance;
}
public SearchResultsDTO getCommAcounts(CommAccountsSearchParams key, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException {
SleuthkitCase getCase() throws NoCurrentCaseException {
return Case.getCurrentCaseThrows().getSleuthkitCase();
}
public SearchResultsDTO getCommAcounts(CommAccountsSearchParams key, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException {
if (key.getType() == null) {
throw new IllegalArgumentException("Must have non-null type");
} else if (key.getDataSourceId() != null && key.getDataSourceId() <= 0) {
throw new IllegalArgumentException("Data source id must be greater than 0 or null");
}
SearchParams<CommAccountsSearchParams> searchParams = new SearchParams<>(key, startItem, maxCount);
if (hardRefresh) {
this.searchParamsCache.invalidate(searchParams);
}
SearchParams<CommAccountsSearchParams> searchParams = new SearchParams<>(key, startItem, maxCount);
return searchParamsCache.get(searchParams, () -> fetchCommAccountsDTOs(searchParams));
}
}
/**
* Returns a list of paged artifacts.
@ -103,10 +120,10 @@ public class CommAccountsDAO {
Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
Long dataSourceId = cacheKey.getParamData().getDataSourceId();
BlackboardArtifact.Type artType = BlackboardArtifact.Type.TSK_ACCOUNT;
if ( (cacheKey.getStartItem() == 0) // offset is zero AND
&& ( (cacheKey.getMaxResultsCount() != null && currentPageSize < cacheKey.getMaxResultsCount()) // number of results is less than max
|| (cacheKey.getMaxResultsCount() == null)) ) { // OR max number of results was not specified
if ((cacheKey.getStartItem() == 0) // offset is zero AND
&& ((cacheKey.getMaxResultsCount() != null && currentPageSize < cacheKey.getMaxResultsCount()) // number of results is less than max
|| (cacheKey.getMaxResultsCount() == null))) { // OR max number of results was not specified
return currentPageSize;
} else {
if (dataSourceId != null) {
@ -114,9 +131,9 @@ public class CommAccountsDAO {
} else {
return blackboard.getArtifactsCount(artType.getTypeID());
}
}
}
}
@NbBundle.Messages({"CommAccounts.name.text=Communication Accounts"})
private SearchResultsDTO fetchCommAccountsDTOs(SearchParams<CommAccountsSearchParams> cacheKey) throws NoCurrentCaseException, TskCoreException, SQLException {
@ -125,22 +142,188 @@ public class CommAccountsDAO {
Blackboard blackboard = skCase.getBlackboard();
Account.Type type = cacheKey.getParamData().getType();
Long dataSourceId = cacheKey.getParamData().getDataSourceId();
List<BlackboardArtifact> allArtifacts = blackboard.getArtifacts(BlackboardArtifact.Type.TSK_ACCOUNT,
List<BlackboardArtifact> allArtifacts = blackboard.getArtifacts(BlackboardArtifact.Type.TSK_ACCOUNT,
BlackboardAttribute.Type.TSK_ACCOUNT_TYPE, type.getTypeName(), dataSourceId,
false); // GVDTODO handle approved/rejected account actions
false); // GVDTODO handle approved/rejected account actions
// get current page of artifacts
List<BlackboardArtifact> pagedArtifacts = getPaged(allArtifacts, cacheKey);
// Populate the attributes for paged artifacts in the list. This is done using one database call as an efficient way to
// load many artifacts/attributes at once.
// load many artifacts/attributes at once.
blackboard.loadBlackboardAttributes(pagedArtifacts);
DataArtifactDAO dataArtDAO = MainDAO.getInstance().getDataArtifactsDAO();
BlackboardArtifactDAO.TableData tableData = dataArtDAO.createTableData(BlackboardArtifact.Type.TSK_ACCOUNT, pagedArtifacts);
return new DataArtifactTableSearchResultsDTO(BlackboardArtifact.Type.TSK_ACCOUNT, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), allArtifacts.size());
}
private static TreeResultsDTO.TreeItemDTO<CommAccountsSearchParams> createAccountTreeItem(Account.Type accountType, Long dataSourceId, TreeResultsDTO.TreeDisplayCount count) {
return new TreeResultsDTO.TreeItemDTO<>(
"ACCOUNTS",
new CommAccountsSearchParams(accountType, dataSourceId),
accountType.getTypeName(),
accountType.getDisplayName(),
count);
}
/**
* Returns the accounts and their counts in the current data source if a
* data source id is provided or all accounts if data source id is null.
*
* @param dataSourceId The data source id or null for no data source filter.
*
* @return The results.
*
* @throws ExecutionException
*/
public TreeResultsDTO<CommAccountsSearchParams> getAccountsCounts(Long dataSourceId) throws ExecutionException {
String query = "res.account_type AS account_type, MIN(res.account_display_name) AS account_display_name, COUNT(*) AS count\n"
+ "FROM (\n"
+ " SELECT MIN(account_types.type_name) AS account_type, MIN(account_types.display_name) AS account_display_name\n"
+ " FROM blackboard_artifacts\n"
+ " LEFT JOIN blackboard_attributes ON blackboard_artifacts.artifact_id = blackboard_attributes.artifact_id\n"
+ " LEFT JOIN account_types ON blackboard_attributes.value_text = account_types.type_name\n"
+ " WHERE blackboard_artifacts.artifact_type_id = " + BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID() + "\n"
+ " AND blackboard_attributes.attribute_type_id = " + BlackboardAttribute.Type.TSK_ACCOUNT_TYPE.getTypeID() + "\n"
+ (dataSourceId != null && dataSourceId > 0 ? " AND blackboard_artifacts.data_source_obj_id = " + dataSourceId + " " : " ") + "\n"
+ " -- group by artifact_id to ensure only one account type per artifact\n"
+ " GROUP BY blackboard_artifacts.artifact_id\n"
+ ") res\n"
+ "GROUP BY res.account_type\n"
+ "ORDER BY MIN(res.account_display_name)";
List<TreeResultsDTO.TreeItemDTO<CommAccountsSearchParams>> accountParams = new ArrayList<>();
try {
Set<Account.Type> indeterminateTypes = this.accountCounts.getEnqueued().stream()
.filter(evt -> dataSourceId == null || evt.getDataSourceId() == dataSourceId)
.map(evt -> evt.getAccountType())
.collect(Collectors.toSet());
getCase().getCaseDbAccessManager().select(query, (resultSet) -> {
try {
while (resultSet.next()) {
String accountTypeName = resultSet.getString("account_type");
String accountDisplayName = resultSet.getString("account_display_name");
Account.Type accountType = new Account.Type(accountTypeName, accountDisplayName);
long count = resultSet.getLong("count");
TreeDisplayCount treeDisplayCount = indeterminateTypes.contains(accountType)
? TreeDisplayCount.INDETERMINATE
: TreeResultsDTO.TreeDisplayCount.getDeterminate(count);
accountParams.add(createAccountTreeItem(accountType, dataSourceId, treeDisplayCount));
}
} catch (SQLException ex) {
logger.log(Level.WARNING, "An error occurred while fetching artifact type counts.", ex);
}
});
// return results
return new TreeResultsDTO<>(accountParams);
} catch (NoCurrentCaseException | TskCoreException ex) {
throw new ExecutionException("An error occurred while fetching data artifact counts.", ex);
}
}
@Override
void clearCaches() {
this.searchParamsCache.invalidateAll();
this.handleIngestComplete();
}
@Override
Set<? extends DAOEvent> handleIngestComplete() {
return SubDAOUtils.getIngestCompleteEvents(
this.accountCounts,
(daoEvt) -> createAccountTreeItem(daoEvt.getAccountType(), daoEvt.getDataSourceId(), TreeResultsDTO.TreeDisplayCount.UNSPECIFIED)
);
}
@Override
Set<TreeEvent> shouldRefreshTree() {
return SubDAOUtils.getRefreshEvents(
this.accountCounts,
(daoEvt) -> createAccountTreeItem(daoEvt.getAccountType(), daoEvt.getDataSourceId(), TreeResultsDTO.TreeDisplayCount.UNSPECIFIED)
);
}
@Override
Set<DAOEvent> processEvent(PropertyChangeEvent evt) {
// get a grouping of artifacts mapping the artifact type id to data source id.
ModuleDataEvent dataEvt = DAOEventUtils.getModuelDataFromArtifactEvent(evt);
if (dataEvt == null) {
return Collections.emptySet();
}
Map<Account.Type, Set<Long>> accountTypeMap = new HashMap<>();
for (BlackboardArtifact art : dataEvt.getArtifacts()) {
try {
if (art.getType().getTypeID() == BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID()) {
BlackboardAttribute accountTypeAttribute = art.getAttribute(BlackboardAttribute.Type.TSK_ACCOUNT_TYPE);
if (accountTypeAttribute == null) {
continue;
}
String accountTypeName = accountTypeAttribute.getValueString();
if (accountTypeName == null) {
continue;
}
accountTypeMap.computeIfAbsent(getCase().getCommunicationsManager().getAccountType(accountTypeName), (k) -> new HashSet<>())
.add(art.getDataSourceObjectID());
}
} catch (NoCurrentCaseException | TskCoreException ex) {
logger.log(Level.WARNING, "Unable to fetch artifact category for artifact with id: " + art.getId(), ex);
}
}
// don't do anything else if no relevant events
if (accountTypeMap.isEmpty()) {
return Collections.emptySet();
}
SubDAOUtils.invalidateKeys(this.searchParamsCache,
(sp) -> Pair.of(sp.getType(), sp.getDataSourceId()), accountTypeMap);
List<CommAccountsEvent> accountEvents = new ArrayList<>();
for (Map.Entry<Account.Type, Set<Long>> entry : accountTypeMap.entrySet()) {
Account.Type accountType = entry.getKey();
for (Long dsObjId : entry.getValue()) {
CommAccountsEvent newEvt = new CommAccountsEvent(accountType, dsObjId);
accountEvents.add(newEvt);
}
}
Stream<TreeEvent> treeEvents = this.accountCounts.enqueueAll(accountEvents).stream()
.map(daoEvt -> new TreeEvent(createAccountTreeItem(daoEvt.getAccountType(), daoEvt.getDataSourceId(), TreeResultsDTO.TreeDisplayCount.INDETERMINATE), false));
return Stream.of(accountEvents.stream(), treeEvents)
.flatMap(s -> s)
.collect(Collectors.toSet());
}
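    /*
     * Illustrative sketch, not part of this change: plausible semantics for
     * SubDAOUtils.invalidateKeys as called above, assuming it generalizes
     * the manual cache walk in AnalysisResultDAO.clearRelevantCacheEntries;
     * the actual helper may differ. Uses Cache, ConcurrentMap, Pair, and
     * java.util.function.Function, all imported in this file's context.
     */
    static <P, V, T> void invalidateKeys(Cache<SearchParams<P>, V> cache,
            Function<P, Pair<T, Long>> keyExtractor, Map<T, Set<Long>> affected) {
        ConcurrentMap<SearchParams<P>, V> map = cache.asMap();
        map.forEach((searchParams, value) -> {
            Pair<T, Long> derived = keyExtractor.apply(searchParams.getParamData());
            Set<Long> dsIds = affected.get(derived.getLeft());
            // Drop the entry when its type was touched by the event and either
            // no data source filter is set or the filtered data source matches.
            if (dsIds != null && (derived.getRight() == null || dsIds.contains(derived.getRight()))) {
                map.remove(searchParams);
            }
        });
    }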
/**
* Returns true if the DAO event could update the data stored in the
* parameters.
*
* @param parameters The parameters.
* @param evt The event.
*
* @return True if event invalidates parameters.
*/
private boolean isCommAcctInvalidating(CommAccountsSearchParams parameters, DAOEvent evt) {
if (evt instanceof CommAccountsEvent) {
CommAccountsEvent commEvt = (CommAccountsEvent) evt;
return (parameters.getType().getTypeName().equals(commEvt.getType()))
&& (parameters.getDataSourceId() == null || Objects.equals(parameters.getDataSourceId(), commEvt.getDataSourceId()));
} else {
return false;
}
}
/**
* Handles fetching and paging of data for communication accounts.
*/
@ -155,53 +338,18 @@ public class CommAccountsDAO {
super(params);
}
@Override
public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException {
return MainDAO.getInstance().getCommAccountsDAO().getCommAcounts(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh);
protected CommAccountsDAO getDAO() {
return MainDAO.getInstance().getCommAccountsDAO();
}
@Override
public boolean isRefreshRequired(PropertyChangeEvent evt) {
CommAccountsSearchParams params = this.getParameters();
String eventType = evt.getPropertyName();
public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException {
return getDAO().getCommAcounts(this.getParameters(), pageIdx * pageSize, (long) pageSize);
}
if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) {
/**
* Checking for a current case is a stop gap measure until a
* different way of handling the closing of cases is worked out.
* Currently, remote events may be received for a case that is
* already closed.
*/
try {
Case.getCurrentCaseThrows();
/**
* Even with the check above, it is still possible that the
* case will be closed in a different thread before this
* code executes. If that happens, it is possible for the
* event to have a null oldValue.
*/
ModuleDataEvent eventData = (ModuleDataEvent) evt.getOldValue();
if (null != eventData
&& eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID()) {
// check that the update is for the same account type
for (BlackboardArtifact artifact : eventData.getArtifacts()) {
for (BlackboardAttribute atribute : artifact.getAttributes()) {
if (atribute.getAttributeType() == BlackboardAttribute.Type.TSK_ACCOUNT_TYPE) {
if (atribute.getValueString().equals(params.getType().toString())) {
return true;
}
}
}
}
}
} catch (NoCurrentCaseException notUsed) {
// Case is closed, do nothing.
} catch (TskCoreException ex) {
// There is nothing we can do with the exception.
}
}
return false;
@Override
public boolean isRefreshRequired(DAOEvent evt) {
return getDAO().isCommAcctInvalidating(this.getParameters(), evt);
}
}
}

View File

@ -20,16 +20,18 @@ package org.sleuthkit.autopsy.mainui.datamodel;
import java.util.Objects;
import org.sleuthkit.datamodel.Account;
import org.sleuthkit.datamodel.BlackboardArtifact;
/**
* Key for accessing data about communication accounts from the DAO.
*/
public class CommAccountsSearchParams {
public class CommAccountsSearchParams extends DataArtifactSearchParam {
private final Account.Type type;
private final Long dataSourceId;
public CommAccountsSearchParams(Account.Type type, Long dataSourceId) {
super(BlackboardArtifact.Type.TSK_ACCOUNT, dataSourceId);
this.type = type;
this.dataSourceId = dataSourceId;
}

View File

@ -18,20 +18,35 @@
*/
package org.sleuthkit.autopsy.mainui.datamodel;
import org.sleuthkit.autopsy.mainui.datamodel.events.DataArtifactEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventUtils;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.beans.PropertyChangeEvent;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.stream.Collectors;
import org.sleuthkit.autopsy.coreutils.Logger;
import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
import java.util.stream.Stream;
import org.apache.commons.lang3.tuple.Pair;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeDisplayCount;
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO;
import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.TreeCounts;
import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
@ -43,6 +58,9 @@ import org.sleuthkit.datamodel.TskCoreException;
/**
* DAO for providing data about data artifacts to populate the results viewer.
*/
@NbBundle.Messages({
"DataArtifactDAO_Accounts_displayName=Communication Accounts"
})
public class DataArtifactDAO extends BlackboardArtifactDAO {
private static Logger logger = Logger.getLogger(DataArtifactDAO.class.getName());
@ -65,21 +83,22 @@ public class DataArtifactDAO extends BlackboardArtifactDAO {
}
private final Cache<SearchParams<BlackboardArtifactSearchParam>, DataArtifactTableSearchResultsDTO> dataArtifactCache = CacheBuilder.newBuilder().maximumSize(1000).build();
private final TreeCounts<DataArtifactEvent> treeCounts = new TreeCounts<>();
private DataArtifactTableSearchResultsDTO fetchDataArtifactsForTable(SearchParams<BlackboardArtifactSearchParam> cacheKey) throws NoCurrentCaseException, TskCoreException {
SleuthkitCase skCase = getCase();
Blackboard blackboard = skCase.getBlackboard();
BlackboardArtifact.Type artType = cacheKey.getParamData().getArtifactType();
String pagedWhereClause = getWhereClause(cacheKey);
List<BlackboardArtifact> arts = new ArrayList<>();
arts.addAll(blackboard.getDataArtifactsWhere(pagedWhereClause));
blackboard.loadBlackboardAttributes(arts);
long totalResultsCount = getTotalResultsCount(cacheKey, arts.size());
long totalResultsCount = getTotalResultsCount(cacheKey, arts.size());
TableData tableData = createTableData(artType, arts);
return new DataArtifactTableSearchResultsDTO(artType, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), totalResultsCount);
}
@ -92,7 +111,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO {
return new DataArtifactRowDTO((DataArtifact) artifact, srcContent, linkedFile, isTimelineSupported, cellValues, id);
}
public DataArtifactTableSearchResultsDTO getDataArtifactsForTable(DataArtifactSearchParam artifactKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException {
public DataArtifactTableSearchResultsDTO getDataArtifactsForTable(DataArtifactSearchParam artifactKey, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException {
BlackboardArtifact.Type artType = artifactKey.getArtifactType();
if (artType == null || artType.getCategory() != BlackboardArtifact.Category.DATA_ARTIFACT
@ -103,19 +122,17 @@ public class DataArtifactDAO extends BlackboardArtifactDAO {
}
SearchParams<BlackboardArtifactSearchParam> searchParams = new SearchParams<>(artifactKey, startItem, maxCount);
if (hardRefresh) {
this.dataArtifactCache.invalidate(searchParams);
}
return dataArtifactCache.get(searchParams, () -> fetchDataArtifactsForTable(searchParams));
}
public boolean isDataArtifactInvalidating(DataArtifactSearchParam key, ModuleDataEvent eventData) {
return key.getArtifactType().equals(eventData.getBlackboardArtifactType());
}
public void dropDataArtifactCache() {
dataArtifactCache.invalidateAll();
private boolean isDataArtifactInvalidating(DataArtifactSearchParam key, DAOEvent eventData) {
if (!(eventData instanceof DataArtifactEvent)) {
return false;
} else {
DataArtifactEvent dataArtEvt = (DataArtifactEvent) eventData;
return key.getArtifactType().getTypeID() == dataArtEvt.getArtifactType().getTypeID()
&& (key.getDataSourceId() == null || (key.getDataSourceId() == dataArtEvt.getDataSourceId()));
}
}
/**
@ -133,15 +150,19 @@ public class DataArtifactDAO extends BlackboardArtifactDAO {
public TreeResultsDTO<DataArtifactSearchParam> getDataArtifactCounts(Long dataSourceId) throws ExecutionException {
try {
// get row dto's sorted by display name
Set<BlackboardArtifact.Type> indeterminateTypes = this.treeCounts.getEnqueued().stream()
.filter(evt -> dataSourceId == null || evt.getDataSourceId() == dataSourceId)
.map(evt -> evt.getArtifactType())
.collect(Collectors.toSet());
Map<BlackboardArtifact.Type, Long> typeCounts = getCounts(BlackboardArtifact.Category.DATA_ARTIFACT, dataSourceId);
List<TreeResultsDTO.TreeItemDTO<DataArtifactSearchParam>> treeItemRows = typeCounts.entrySet().stream()
.map(entry -> {
return new TreeResultsDTO.TreeItemDTO<>(
BlackboardArtifact.Category.DATA_ARTIFACT.name(),
new DataArtifactSearchParam(entry.getKey(), dataSourceId),
entry.getKey().getTypeID(),
entry.getKey().getDisplayName(),
entry.getValue());
TreeDisplayCount displayCount = indeterminateTypes.contains(entry.getKey())
? TreeDisplayCount.INDETERMINATE
: TreeDisplayCount.getDeterminate(entry.getValue());
return createDataArtifactTreeItem(entry.getKey(), dataSourceId, displayCount);
})
.sorted(Comparator.comparing(countRow -> countRow.getDisplayName()))
.collect(Collectors.toList());
@@ -154,6 +175,96 @@ public class DataArtifactDAO extends BlackboardArtifactDAO {
}
}
@Override
void clearCaches() {
this.dataArtifactCache.invalidateAll();
this.handleIngestComplete();
}
@Override
Set<DAOEvent> processEvent(PropertyChangeEvent evt) {
// get a grouping of artifacts mapping the artifact type id to data source id.
ModuleDataEvent dataEvt = DAOEventUtils.getModuelDataFromArtifactEvent(evt);
if (dataEvt == null) {
return Collections.emptySet();
}
Map<BlackboardArtifact.Type, Set<Long>> artifactTypeDataSourceMap = new HashMap<>();
for (BlackboardArtifact art : dataEvt.getArtifacts()) {
try {
if (BlackboardArtifact.Category.DATA_ARTIFACT.equals(art.getType().getCategory())
// accounts are handled in CommAccountsDAO
&& art.getType().getTypeID() != BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID()) {
artifactTypeDataSourceMap.computeIfAbsent(art.getType(), (k) -> new HashSet<>())
.add(art.getDataSourceObjectID());
}
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Unable to fetch artifact category for artifact with id: " + art.getId(), ex);
}
}
// don't do anything else if no relevant events
if (artifactTypeDataSourceMap.isEmpty()) {
return Collections.emptySet();
}
SubDAOUtils.invalidateKeys(this.dataArtifactCache, (sp) -> Pair.of(sp.getArtifactType(), sp.getDataSourceId()), artifactTypeDataSourceMap);
// gather dao events based on artifacts
List<DataArtifactEvent> dataArtifactEvents = new ArrayList<>();
for (Entry<BlackboardArtifact.Type, Set<Long>> entry : artifactTypeDataSourceMap.entrySet()) {
BlackboardArtifact.Type artType = entry.getKey();
for (Long dsObjId : entry.getValue()) {
DataArtifactEvent newEvt = new DataArtifactEvent(artType, dsObjId);
dataArtifactEvents.add(newEvt);
}
}
Stream<TreeEvent> dataArtifactTreeEvents = this.treeCounts.enqueueAll(dataArtifactEvents).stream()
.map(daoEvt -> new TreeEvent(createDataArtifactTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.INDETERMINATE), false));
return Stream.of(dataArtifactEvents.stream(), dataArtifactTreeEvents)
.flatMap(s -> s)
.collect(Collectors.toSet());
}
/**
* Returns the display name for an artifact type.
*
* @param artifactType The artifact type.
*
* @return The display name.
*/
public String getDisplayName(BlackboardArtifact.Type artifactType) {
return artifactType.getTypeID() == BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID()
? Bundle.DataArtifactDAO_Accounts_displayName()
: artifactType.getDisplayName();
}
private TreeItemDTO<DataArtifactSearchParam> createDataArtifactTreeItem(BlackboardArtifact.Type artifactType, Long dataSourceId, TreeDisplayCount displayCount) {
return new TreeResultsDTO.TreeItemDTO<>(
BlackboardArtifact.Category.DATA_ARTIFACT.name(),
new DataArtifactSearchParam(artifactType, dataSourceId),
artifactType.getTypeID(),
getDisplayName(artifactType),
displayCount);
}
@Override
Set<? extends DAOEvent> handleIngestComplete() {
return SubDAOUtils.getIngestCompleteEvents(this.treeCounts,
(daoEvt) -> createDataArtifactTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.UNSPECIFIED));
}
@Override
Set<TreeEvent> shouldRefreshTree() {
return SubDAOUtils.getRefreshEvents(this.treeCounts,
(daoEvt) -> createDataArtifactTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.UNSPECIFIED));
}
/*
* Handles fetching and paging of data artifacts.
*/
@@ -168,19 +279,18 @@ public class DataArtifactDAO extends BlackboardArtifactDAO {
super(params);
}
@Override
public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException {
return MainDAO.getInstance().getDataArtifactsDAO().getDataArtifactsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh);
protected DataArtifactDAO getDAO() {
return MainDAO.getInstance().getDataArtifactsDAO();
}
@Override
public boolean isRefreshRequired(PropertyChangeEvent evt) {
ModuleDataEvent dataEvent = this.getModuleDataFromEvt(evt);
if (dataEvent == null) {
return false;
}
public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException {
return getDAO().getDataArtifactsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize);
}
return MainDAO.getInstance().getDataArtifactsDAO().isDataArtifactInvalidating(this.getParameters(), dataEvent);
@Override
public boolean isRefreshRequired(DAOEvent evt) {
return getDAO().isDataArtifactInvalidating(this.getParameters(), evt);
}
}
}
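As a concrete illustration of the cache eviction step in processEvent above, here is a hedged sketch (the artifact type and data source id are hypothetical): each cached SearchParams key is reduced to an (artifact type, data source id) pair, and any pair present in the event digest is evicted.
// Illustration only (not part of this commit): evict cached table pages for
// TSK_WEB_HISTORY artifacts that arrived on data source 42.
Map<BlackboardArtifact.Type, Set<Long>> affected = new HashMap<>();
affected.computeIfAbsent(BlackboardArtifact.Type.TSK_WEB_HISTORY, (k) -> new HashSet<>()).add(42L);
SubDAOUtils.invalidateKeys(dataArtifactCache,
        (sp) -> Pair.of(sp.getArtifactType(), sp.getDataSourceId()),
        affected);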

View File

@@ -27,11 +27,12 @@ import org.sleuthkit.datamodel.BlackboardArtifact;
public class DataArtifactTableSearchResultsDTO extends BaseSearchResultsDTO {
private static final String TYPE_ID = "DATA_ARTIFACT";
private static final String SIGNATURE = "dataartifact";
private final BlackboardArtifact.Type artifactType;
public DataArtifactTableSearchResultsDTO(BlackboardArtifact.Type artifactType, List<ColumnKey> columns, List<RowDTO> items, long startItem, long totalResultsCount) {
super(TYPE_ID, artifactType.getDisplayName(), columns, items, startItem, totalResultsCount);
super(TYPE_ID, artifactType.getDisplayName(), columns, items, SIGNATURE, startItem, totalResultsCount);
this.artifactType = artifactType;
}

View File

@@ -19,6 +19,7 @@
package org.sleuthkit.autopsy.mainui.datamodel;
import java.util.List;
import org.sleuthkit.autopsy.mainui.datamodel.MediaTypeUtils.ExtensionMediaType;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.LayoutFile;
import org.sleuthkit.datamodel.SlackFile;
@@ -29,10 +30,6 @@ import org.sleuthkit.datamodel.TskData;
*/
public class FileRowDTO extends BaseRowDTO {
public enum ExtensionMediaType {
IMAGE, VIDEO, AUDIO, DOC, EXECUTABLE, TEXT, WEB, PDF, ARCHIVE, UNCATEGORIZED
}
private static String TYPE_ID = "FILE";
public static String getTypeIdForClass() {

View File

@@ -0,0 +1,69 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel;
/**
* Filters by file size for views.
*/
public enum FileSizeFilter {
SIZE_50_200(0, "SIZE_50_200", "50 - 200MB", 50_000_000L, 200_000_000L), //NON-NLS
SIZE_200_1000(1, "SIZE_200_1GB", "200MB - 1GB", 200_000_000L, 1_000_000_000L), //NON-NLS
SIZE_1000_(2, "SIZE_1000+", "1GB+", 1_000_000_000L, null); //NON-NLS
private final int id;
private final String name;
private final String displayName;
private long minBound;
private Long maxBound;
private FileSizeFilter(int id, String name, String displayName, long minBound, Long maxBound) {
this.id = id;
this.name = name;
this.displayName = displayName;
this.minBound = minBound;
this.maxBound = maxBound;
}
public String getName() {
return this.name;
}
public int getId() {
return this.id;
}
public String getDisplayName() {
return this.displayName;
}
/**
* @return The minimum inclusive bound (non-null).
*/
public long getMinBound() {
return minBound;
}
/**
* @return The maximum exclusive bound (if null, no upper limit).
*/
public Long getMaxBound() {
return maxBound;
}
}
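As a usage illustration (not part of this commit; the helper below is hypothetical), the inclusive minimum bound and exclusive, possibly-null maximum bound translate directly into a size predicate for file queries:
// Hypothetical helper: build a SQL size predicate from a FileSizeFilter.
// The minimum bound is inclusive; a null maximum bound means no upper limit.
static String toSizeWhereClause(FileSizeFilter filter) {
    StringBuilder clause = new StringBuilder("size >= ").append(filter.getMinBound());
    if (filter.getMaxBound() != null) {
        clause.append(" AND size < ").append(filter.getMaxBound());
    }
    return clause.toString();
}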

View File

@@ -18,29 +18,41 @@
*/
package org.sleuthkit.autopsy.mainui.datamodel;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.logging.Level;
import org.openide.util.NbBundle.Messages;
import org.apache.commons.lang3.StringUtils;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.core.UserPreferences;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.TimeZoneUtils;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.FileSystem;
import org.sleuthkit.datamodel.Host;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.Pool;
import org.sleuthkit.datamodel.SlackFile;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.Volume;
import org.sleuthkit.datamodel.VolumeSystem;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
/**
* Utility class for creating consistent table data.
*/
class FileSystemColumnUtils {
public class FileSystemColumnUtils {
private static final Logger logger = Logger.getLogger(FileSystemColumnUtils.class.getName());
@@ -151,9 +163,10 @@ class FileSystemColumnUtils {
*
* @param content The Content object.
*
* @return The type corresponding to the content; UNSUPPORTED if the content will not be displayed
* @return The type corresponding to the content; UNSUPPORTED if the
* content will not be displayed in the file system section of the tree.
*/
private static ContentType getContentType(Content content) {
private static ContentType getDisplayableContentType(Content content) {
if (content instanceof Image) {
return ContentType.IMAGE;
} else if (content instanceof Volume) {
@@ -167,9 +180,11 @@ class FileSystemColumnUtils {
}
/**
* Check whether a given content object should be displayed.
* Check whether a given content object should be displayed in the
* file system section of the tree.
* We can display an object if ContentType is not UNSUPPORTED
* and if it is not the root directory.
* and if it is not the root directory. We cannot display
* file systems, volume systems, artifacts, etc.
*
* @param content The content.
*
@@ -177,9 +192,15 @@ class FileSystemColumnUtils {
*/
static boolean isDisplayable(Content content) {
if (content instanceof AbstractFile) {
// .. directories near the top of the directory structure can
// pass the isRoot() check, so first check if the name is empty
// (real root directories will have a blank name field)
if (!content.getName().isEmpty()) {
return true;
}
return ! ((AbstractFile)content).isRoot();
}
return (getContentType(content) != ContentType.UNSUPPORTED);
return (getDisplayableContentType(content) != ContentType.UNSUPPORTED);
}
/**
@@ -194,7 +215,7 @@ class FileSystemColumnUtils {
static List<ContentType> getDisplayableTypesForContentList(List<Content> contentList) {
List<ContentType> displayableTypes = new ArrayList<>();
for (Content content : contentList) {
ContentType type = getContentType(content);
ContentType type = getDisplayableContentType(content);
if (type != ContentType.UNSUPPORTED && ! displayableTypes.contains(type)) {
displayableTypes.add(type);
}
@@ -288,11 +309,25 @@ class FileSystemColumnUtils {
return pool.getType().getName(); // We currently use the type name for both the name and type fields
} else if (content instanceof AbstractFile) {
AbstractFile file = (AbstractFile)content;
return file.getName(); // GVDTODO handle . and .. from getContentDisplayName()
return convertDotDirName(file);
}
return content.getName();
}
@NbBundle.Messages({
"FileSystemColumnUtils.getContentName.dotDir=[current folder]",
"FileSystemColumnUtils.getContentName.dotDotDir=[parent folder]",
})
public static String convertDotDirName(AbstractFile file) {
if (file.getName().equals("..")) {
return Bundle.FileSystemColumnUtils_getContentName_dotDotDir();
} else if (file.getName().equals(".")) {
return Bundle.FileSystemColumnUtils_getContentName_dotDir();
}
return file.getName();
}
/**
* Get the column keys for an abstract file object.
* Only use this method if all rows contain AbstractFile objects.
@@ -459,7 +494,7 @@ class FileSystemColumnUtils {
*
* @return The display name.
*/
private static String getVolumeDisplayName(Volume vol) {
public static String getVolumeDisplayName(Volume vol) {
// set name, display name, and icon
String volName = "vol" + Long.toString(vol.getAddr());
long end = vol.getStart() + (vol.getLength() - 1);
@@ -481,7 +516,9 @@ class FileSystemColumnUtils {
/**
* Get the content that should be displayed in the table based on the given object.
* Algorithm:
* - If content is already displayable, return it
* - If content is known and known files are being hidden, return an empty list
* - If content is a slack file and slack files are being hidden, return an empty list
* - If content is a displayable type, return it
* - If content is a volume system, return its displayable children
* - If content is a file system, return the displayable children of the root folder
* - If content is the root folder, return the displayable children of the root folder
@@ -490,8 +527,41 @@ class FileSystemColumnUtils {
*
* @return List of content to add to the table.
*/
static List<Content> getNextDisplayableContent(Content content) throws TskCoreException {
static List<Content> getDisplayableContentForTable(Content content) throws TskCoreException {
if (content instanceof AbstractFile) {
AbstractFile file = (AbstractFile)content;
// Skip known files if requested
if (UserPreferences.hideKnownFilesInDataSourcesTree()
&& file.getKnown().equals(TskData.FileKnown.KNOWN)) {
return new ArrayList<>();
}
// Skip slack files if requested
if (UserPreferences.hideSlackFilesInDataSourcesTree()
&& file instanceof SlackFile) {
return new ArrayList<>();
}
}
return getDisplayableContentForTableAndTree(content);
}
/**
* Get the content that should be displayed in the table based on the given object.
* Algorithm:
* - If content is a displayable type, return it
* - If content is a volume system, return its displayable children
* - If content is a file system, return the displayable children of the root folder
* - If content is the root folder, return the displayable children of the root folder
*
* @param content The base content.
*
* @return List of content to add to the table/tree.
*
* @throws TskCoreException
*/
private static List<Content> getDisplayableContentForTableAndTree(Content content) throws TskCoreException {
// If the given content is displayable, return it
if (FileSystemColumnUtils.isDisplayable(content)) {
return Arrays.asList(content);
@@ -541,4 +611,83 @@ class FileSystemColumnUtils {
return new ColumnKey(name, name, Bundle.FileSystemColumnUtils_noDescription());
}
/**
* Get the children of a given content ID that will be visible in the tree.
*
* @param contentId The ID of the parent content.
*
* @return The visible children of the given content.
*
* @throws TskCoreException
* @throws NoCurrentCaseException
*/
public static List<Content> getVisibleTreeNodeChildren(Long contentId) throws TskCoreException, NoCurrentCaseException {
SleuthkitCase skCase = Case.getCurrentCaseThrows().getSleuthkitCase();
Content content = skCase.getContentById(contentId);
List<Content> originalChildren = content.getChildren();
// First, advance past anything we don't display (volume systems, file systems, root folders)
List<Content> treeChildren = new ArrayList<>();
for (Content child : originalChildren) {
treeChildren.addAll(FileSystemColumnUtils.getDisplayableContentForTableAndTree(child));
}
// Filter out the . and .. directories
for (Iterator<Content> iter = treeChildren.listIterator(); iter.hasNext(); ) {
Content c = iter.next();
if ((c instanceof AbstractFile) && ContentUtils.isDotDirectory((AbstractFile)c)) {
iter.remove();
}
}
// Filter out any files without children
for (Iterator<Content> iter = treeChildren.listIterator(); iter.hasNext(); ) {
Content c = iter.next();
if (c instanceof AbstractFile && (! hasDisplayableContentChildren((AbstractFile)c))) {
iter.remove();
}
}
return treeChildren;
}
/**
* Check whether a file has displayable children.
*
* @param file The file to check.
*
* @return True if the file has displayable children, false otherwise.
*/
private static boolean hasDisplayableContentChildren(AbstractFile file) {
if (file != null) {
try {
// If the file has no children at all, then it has no displayable children.
if (!file.hasChildren()) {
return false;
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error checking if the node has children for file with ID: " + file.getId(), ex); //NON-NLS
return false;
}
String query = "SELECT COUNT(obj_id) AS count FROM "
+ " ( SELECT obj_id FROM tsk_objects WHERE par_obj_id = " + file.getId() + " AND type = "
+ TskData.ObjectType.ARTIFACT.getObjectType()
+ " INTERSECT SELECT artifact_obj_id FROM blackboard_artifacts WHERE obj_id = " + file.getId()
+ " AND (artifact_type_id = " + BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID()
+ " OR artifact_type_id = " + BlackboardArtifact.ARTIFACT_TYPE.TSK_MESSAGE.getTypeID() + ") "
+ " UNION SELECT obj_id FROM tsk_objects WHERE par_obj_id = " + file.getId()
+ " AND type = " + TskData.ObjectType.ABSTRACTFILE.getObjectType() + ") AS OBJECT_IDS"; //NON-NLS;
try (SleuthkitCase.CaseDbQuery dbQuery = Case.getCurrentCaseThrows().getSleuthkitCase().executeQuery(query)) {
ResultSet resultSet = dbQuery.getResultSet();
if (resultSet.next()) {
return (0 < resultSet.getInt("count"));
}
} catch (TskCoreException | SQLException | NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Error checking if the node has children for file with ID: " + file.getId(), ex); //NON-NLS
}
}
return false;
}
}
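A hedged usage sketch tying these helpers together (folderObjId is an assumed variable; exception handling omitted); it mirrors the name derivation FileSystemDAO.getNameForContent performs below.
// Illustration only: label the visible tree children of a folder.
List<Content> children = FileSystemColumnUtils.getVisibleTreeNodeChildren(folderObjId);
for (Content child : children) {
    String label;
    if (child instanceof Volume) {
        label = FileSystemColumnUtils.getVolumeDisplayName((Volume) child);
    } else if (child instanceof AbstractFile) {
        label = FileSystemColumnUtils.convertDotDirName((AbstractFile) child);
    } else {
        label = child.getName();
    }
    // use 'label' as the node display name...
}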

View File

@@ -20,17 +20,34 @@ package org.sleuthkit.autopsy.mainui.datamodel;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.ImmutableSet;
import java.beans.PropertyChangeEvent;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.events.DataSourceAddedEvent;
import org.sleuthkit.autopsy.casemodule.events.DataSourceNameChangedEvent;
import org.sleuthkit.autopsy.casemodule.events.HostsAddedEvent;
import org.sleuthkit.autopsy.casemodule.events.HostsAddedToPersonEvent;
import org.sleuthkit.autopsy.casemodule.events.HostsRemovedFromPersonEvent;
import org.sleuthkit.autopsy.casemodule.events.HostsUpdatedEvent;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventUtils;
import static org.sleuthkit.autopsy.mainui.datamodel.MediaTypeUtils.getExtensionMediaType;
import org.sleuthkit.autopsy.mainui.datamodel.ContentRowDTO.DirectoryRowDTO;
import org.sleuthkit.autopsy.mainui.datamodel.ContentRowDTO.ImageRowDTO;
import org.sleuthkit.autopsy.mainui.datamodel.ContentRowDTO.VolumeRowDTO;
@@ -40,11 +57,17 @@ import org.sleuthkit.autopsy.mainui.datamodel.ContentRowDTO.VirtualDirectoryRowD
import org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO.LayoutFileRowDTO;
import org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO.SlackFileRowDTO;
import org.sleuthkit.autopsy.mainui.datamodel.ContentRowDTO.PoolRowDTO;
import static org.sleuthkit.autopsy.mainui.datamodel.ViewsDAO.getExtensionMediaType;
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeDisplayCount;
import org.sleuthkit.autopsy.mainui.datamodel.events.FileSystemContentEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.FileSystemHostEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.FileSystemPersonEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent;
import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.Directory;
import org.sleuthkit.datamodel.FileSystem;
import org.sleuthkit.datamodel.Host;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.LayoutFile;
@@ -55,20 +78,40 @@ import org.sleuthkit.datamodel.Pool;
import org.sleuthkit.datamodel.SlackFile;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskDataException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.datamodel.VirtualDirectory;
import org.sleuthkit.datamodel.Volume;
import org.sleuthkit.datamodel.VolumeSystem;
/**
* Provides data for the file system portions of the tree and the results viewer.
*/
public class FileSystemDAO {
public class FileSystemDAO extends AbstractDAO {
private static final Logger logger = Logger.getLogger(FileSystemDAO.class.getName());
private static final int CACHE_SIZE = 15; // rule of thumb: 5 entries times number of cached SearchParams sub-types
private static final long CACHE_DURATION = 2;
private static final TimeUnit CACHE_DURATION_UNITS = TimeUnit.MINUTES;
private final Cache<SearchParams<?>, BaseSearchResultsDTO> searchParamsCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).expireAfterAccess(CACHE_DURATION, CACHE_DURATION_UNITS).build();
private static final Set<String> HOST_LEVEL_EVTS = ImmutableSet.of(
Case.Events.DATA_SOURCE_ADDED.toString(),
// this should trigger the case to be reopened
// Case.Events.DATA_SOURCE_DELETED.toString(),
Case.Events.DATA_SOURCE_NAME_CHANGED.toString(),
Case.Events.HOSTS_ADDED.toString(),
Case.Events.HOSTS_DELETED.toString(),
Case.Events.HOSTS_UPDATED.toString()
);
private static final Set<String> PERSON_LEVEL_EVTS = ImmutableSet.of(
Case.Events.HOSTS_ADDED_TO_PERSON.toString(),
Case.Events.HOSTS_REMOVED_FROM_PERSON.toString()
);
private final Cache<SearchParams<?>, BaseSearchResultsDTO> searchParamsCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).expireAfterAccess(CACHE_DURATION, CACHE_DURATION_UNITS).build();
private static final String FILE_SYSTEM_TYPE_ID = "FILE_SYSTEM";
private static FileSystemDAO instance = null;
@@ -79,26 +122,23 @@ public class FileSystemDAO {
}
return instance;
}
public boolean isSystemContentInvalidating(FileSystemContentSearchParam key, Content eventContent) {
if(!(eventContent instanceof Content)) {
return false;
}
try {
return key.getContentObjectId() != eventContent.getParent().getId();
} catch (TskCoreException ex) {
// There is nothing we can do with the exception.
private boolean isSystemContentInvalidating(FileSystemContentSearchParam key, DAOEvent daoEvent) {
if (!(daoEvent instanceof FileSystemContentEvent)) {
return false;
}
FileSystemContentEvent contentEvt = (FileSystemContentEvent) daoEvent;
return contentEvt.getContentObjectId() == null || key.getContentObjectId().equals(contentEvt.getContentObjectId());
}
public boolean isSystemHostInvalidating(FileSystemHostSearchParam key, Host eventHost) {
if(!(eventHost instanceof Host)) {
private boolean isSystemHostInvalidating(FileSystemHostSearchParam key, DAOEvent daoEvent) {
if (!(daoEvent instanceof FileSystemHostEvent)) {
return false;
}
return key.getHostObjectId() != eventHost.getHostId();
return key.getHostObjectId() == ((FileSystemHostEvent) daoEvent).getHostObjectId();
}
private BaseSearchResultsDTO fetchContentForTableFromContent(SearchParams<FileSystemContentSearchParam> cacheKey) throws NoCurrentCaseException, TskCoreException {
@@ -115,7 +155,7 @@ public class FileSystemDAO {
parentName = parentContent.getName();
for (Content content : parentContent.getChildren()) {
contentForTable.addAll(FileSystemColumnUtils.getNextDisplayableContent(content));
contentForTable.addAll(FileSystemColumnUtils.getDisplayableContentForTable(content));
}
return fetchContentForTable(cacheKey, contentForTable, parentName);
@@ -174,7 +214,7 @@ public class FileSystemDAO {
List<Object> cellValues = FileSystemColumnUtils.getCellValuesForHost(host);
rows.add(new BaseRowDTO(cellValues, FILE_SYSTEM_TYPE_ID, host.getHostId()));
}
return new BaseSearchResultsDTO(FILE_SYSTEM_TYPE_ID, parentName, columnKeys, rows, cacheKey.getStartItem(), hostsForTable.size());
return new BaseSearchResultsDTO(FILE_SYSTEM_TYPE_ID, parentName, columnKeys, rows, Host.class.getName(), cacheKey.getStartItem(), hostsForTable.size());
}
private BaseSearchResultsDTO fetchContentForTable(SearchParams<?> cacheKey, List<Content> contentForTable,
@@ -229,7 +269,7 @@ public class FileSystemDAO {
rows.add(new FileRowDTO(
file,
file.getId(),
file.getName(),
FileSystemColumnUtils.convertDotDirName(file),
file.getNameExtension(),
getExtensionMediaType(file.getNameExtension()),
file.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.ALLOC),
@@ -237,7 +277,7 @@ public class FileSystemDAO {
cellValues));
}
}
return new BaseSearchResultsDTO(FILE_SYSTEM_TYPE_ID, parentName, columnKeys, rows, cacheKey.getStartItem(), contentForTable.size());
return new BaseSearchResultsDTO(FILE_SYSTEM_TYPE_ID, parentName, columnKeys, rows, FILE_SYSTEM_TYPE_ID, cacheKey.getStartItem(), contentForTable.size());
}
/**
@@ -260,34 +300,268 @@ public class FileSystemDAO {
return pagedArtsStream.collect(Collectors.toList());
}
public BaseSearchResultsDTO getContentForTable(FileSystemContentSearchParam objectKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException {
public BaseSearchResultsDTO getContentForTable(FileSystemContentSearchParam objectKey, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException {
SearchParams<FileSystemContentSearchParam> searchParams = new SearchParams<>(objectKey, startItem, maxCount);
if (hardRefresh) {
searchParamsCache.invalidate(searchParams);
}
return searchParamsCache.get(searchParams, () -> fetchContentForTableFromContent(searchParams));
}
public BaseSearchResultsDTO getContentForTable(FileSystemHostSearchParam objectKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException {
public BaseSearchResultsDTO getContentForTable(FileSystemHostSearchParam objectKey, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException {
SearchParams<FileSystemHostSearchParam> searchParams = new SearchParams<>(objectKey, startItem, maxCount);
if (hardRefresh) {
searchParamsCache.invalidate(searchParams);
}
return searchParamsCache.get(searchParams, () -> fetchContentForTableFromHost(searchParams));
}
public BaseSearchResultsDTO getHostsForTable(FileSystemPersonSearchParam objectKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException {
public BaseSearchResultsDTO getHostsForTable(FileSystemPersonSearchParam objectKey, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException {
SearchParams<FileSystemPersonSearchParam> searchParams = new SearchParams<>(objectKey, startItem, maxCount);
if (hardRefresh) {
searchParamsCache.invalidate(searchParams);
return searchParamsCache.get(searchParams, () -> fetchHostsForTable(searchParams));
}
@Override
void clearCaches() {
this.searchParamsCache.invalidateAll();
}
private Long getHostFromDs(Content dataSource) {
if (!(dataSource instanceof DataSource)) {
return null;
}
return searchParamsCache.get(searchParams, () -> fetchHostsForTable(searchParams));
try {
Host host = ((DataSource) dataSource).getHost();
return host == null ? null : host.getHostId();
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "There was an error getting the host for data source with id: " + dataSource.getId(), ex);
return null;
}
}
/**
* In instances where parents are hidden, refresh the entire tree.
*
* @param parentContent The parent content.
*
* @return True if full tree should be refreshed.
*/
private boolean invalidatesAllFileSystem(Content parentContent) {
if (parentContent instanceof VolumeSystem || parentContent instanceof FileSystem) {
return true;
}
if (parentContent instanceof Directory) {
Directory dir = (Directory) parentContent;
return dir.isRoot() && !dir.getName().equals(".") && !dir.getName().equals("..");
}
if (parentContent instanceof LocalDirectory) {
return ((LocalDirectory) parentContent).isRoot();
}
return false;
}
@Override
Set<DAOEvent> handleIngestComplete() {
// GVDTODO
return Collections.emptySet();
}
@Override
Set<TreeEvent> shouldRefreshTree() {
// GVDTODO
return Collections.emptySet();
}
@Override
Set<DAOEvent> processEvent(PropertyChangeEvent evt) {
// GVDTODO these can probably be rewritten now that it isn't handling a collection of autopsy events
Set<Long> affectedPersons = new HashSet<>();
Set<Long> affectedHosts = new HashSet<>();
Set<Long> affectedParentContent = new HashSet<>();
boolean refreshAllContent = false;
Content content = DAOEventUtils.getDerivedFileContentFromFileEvent(evt);
if (content != null) {
Content parentContent;
try {
parentContent = content.getParent();
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Unable to get parent content of content with id: " + content.getId(), ex);
return Collections.emptySet();
}
if (parentContent == null) {
return Collections.emptySet();
}
if (invalidatesAllFileSystem(parentContent)) {
refreshAllContent = true;
} else {
affectedParentContent.add(parentContent.getId());
}
} else if (evt instanceof DataSourceAddedEvent) {
Long hostId = getHostFromDs(((DataSourceAddedEvent) evt).getDataSource());
if (hostId != null) {
affectedHosts.add(hostId);
}
} else if (evt instanceof DataSourceNameChangedEvent) {
Long hostId = getHostFromDs(((DataSourceNameChangedEvent) evt).getDataSource());
if (hostId != null) {
affectedHosts.add(hostId);
}
} else if (evt instanceof HostsAddedEvent) {
// GVDTODO how best to handle host added?
} else if (evt instanceof HostsUpdatedEvent) {
// GVDTODO how best to handle host updated?
} else if (evt instanceof HostsAddedToPersonEvent) {
Person person = ((HostsAddedToPersonEvent) evt).getPerson();
affectedPersons.add(person == null ? null : person.getPersonId());
} else if (evt instanceof HostsRemovedFromPersonEvent) {
Person person = ((HostsRemovedFromPersonEvent) evt).getPerson();
affectedPersons.add(person == null ? null : person.getPersonId());
}
final boolean triggerFullRefresh = refreshAllContent;
// GVDTODO handling null ids versus the 'No Persons' option
ConcurrentMap<SearchParams<?>, BaseSearchResultsDTO> concurrentMap = this.searchParamsCache.asMap();
concurrentMap.forEach((k, v) -> {
Object searchParams = k.getParamData();
if (searchParams instanceof FileSystemPersonSearchParam) {
FileSystemPersonSearchParam personParam = (FileSystemPersonSearchParam) searchParams;
if (affectedPersons.contains(personParam.getPersonObjectId())) {
concurrentMap.remove(k);
}
} else if (searchParams instanceof FileSystemHostSearchParam) {
FileSystemHostSearchParam hostParams = (FileSystemHostSearchParam) searchParams;
if (affectedHosts.contains(hostParams.getHostObjectId())) {
concurrentMap.remove(k);
}
} else if (searchParams instanceof FileSystemContentSearchParam) {
FileSystemContentSearchParam contentParams = (FileSystemContentSearchParam) searchParams;
if (triggerFullRefresh
|| contentParams.getContentObjectId() == null
|| affectedParentContent.contains(contentParams.getContentObjectId())) {
concurrentMap.remove(k);
}
}
});
Stream<DAOEvent> fileEvts = triggerFullRefresh
? Stream.of(new FileSystemContentEvent(null))
: affectedParentContent.stream().map(id -> new FileSystemContentEvent(id));
return Stream.of(
affectedPersons.stream().map(id -> new FileSystemPersonEvent(id)),
affectedHosts.stream().map(id -> new FileSystemHostEvent(id)),
fileEvts
)
.flatMap(s -> s)
.collect(Collectors.toSet());
}
/**
* Get all data sources belonging to a given host.
*
* @param host The host.
*
* @return Results containing all data sources for the given host.
*
* @throws ExecutionException
*/
public TreeResultsDTO<FileSystemContentSearchParam> getDataSourcesForHost(Host host) throws ExecutionException {
try {
List<TreeResultsDTO.TreeItemDTO<FileSystemContentSearchParam>> treeItemRows = new ArrayList<>();
for (DataSource ds : Case.getCurrentCaseThrows().getSleuthkitCase().getHostManager().getDataSourcesForHost(host)) {
treeItemRows.add(new TreeResultsDTO.TreeItemDTO<>(
ds.getClass().getSimpleName(),
new FileSystemContentSearchParam(ds.getId()),
ds,
ds.getName(),
null
));
}
return new TreeResultsDTO<>(treeItemRows);
} catch (NoCurrentCaseException | TskCoreException ex) {
throw new ExecutionException("An error occurred while fetching images for host with ID " + host.getHostId(), ex);
}
}
/**
* Create results for a single given data source ID (not its children).
*
* @param dataSourceObjId The data source object ID.
*
* @return Results containing just this data source.
*
* @throws ExecutionException
*/
public TreeResultsDTO<FileSystemContentSearchParam> getSingleDataSource(long dataSourceObjId) throws ExecutionException {
try {
List<TreeResultsDTO.TreeItemDTO<FileSystemContentSearchParam>> treeItemRows = new ArrayList<>();
DataSource ds = Case.getCurrentCaseThrows().getSleuthkitCase().getDataSource(dataSourceObjId);
treeItemRows.add(new TreeResultsDTO.TreeItemDTO<>(
ds.getClass().getSimpleName(),
new FileSystemContentSearchParam(ds.getId()),
ds,
ds.getName(),
null
));
return new TreeResultsDTO<>(treeItemRows);
} catch (NoCurrentCaseException | TskCoreException | TskDataException ex) {
throw new ExecutionException("An error occurred while fetching data source with ID " + dataSourceObjId, ex);
}
}
/**
* Get the children that will be displayed in the tree for a given content ID.
*
* @param contentId Object ID of parent content.
*
* @return The results.
*
* @throws ExecutionException
*/
public TreeResultsDTO<FileSystemContentSearchParam> getDisplayableContentChildren(Long contentId) throws ExecutionException {
try {
List<Content> treeChildren = FileSystemColumnUtils.getVisibleTreeNodeChildren(contentId);
List<TreeResultsDTO.TreeItemDTO<FileSystemContentSearchParam>> treeItemRows = new ArrayList<>();
for (Content child : treeChildren) {
Long countForNode = null;
if ((child instanceof AbstractFile)
&& ! (child instanceof LocalFilesDataSource)) {
countForNode = getContentForTable(new FileSystemContentSearchParam(child.getId()), 0, null).getTotalResultsCount();
}
treeItemRows.add(new TreeResultsDTO.TreeItemDTO<>(
child.getClass().getSimpleName(),
new FileSystemContentSearchParam(child.getId()),
child,
getNameForContent(child),
countForNode == null ? TreeDisplayCount.NOT_SHOWN : TreeDisplayCount.getDeterminate(countForNode)
));
}
return new TreeResultsDTO<>(treeItemRows);
} catch (NoCurrentCaseException | TskCoreException ex) {
throw new ExecutionException("An error occurred while fetching data artifact counts.", ex);
}
}
/**
* Get display name for the given content.
*
* @param content The content.
*
* @return Display name for the content.
*/
private String getNameForContent(Content content) {
if (content instanceof Volume) {
return FileSystemColumnUtils.getVolumeDisplayName((Volume)content);
} else if (content instanceof AbstractFile) {
return FileSystemColumnUtils.convertDotDirName((AbstractFile) content);
}
return content.getName();
}
/**
@@ -304,19 +578,18 @@ public class FileSystemDAO {
super(params);
}
@Override
public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException {
return MainDAO.getInstance().getFileSystemDAO().getContentForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh);
protected FileSystemDAO getDAO() {
return MainDAO.getInstance().getFileSystemDAO();
}
@Override
public boolean isRefreshRequired(PropertyChangeEvent evt) {
Content content = getContentFromEvt(evt);
if (content == null) {
return false;
}
public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException {
return getDAO().getContentForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize);
}
return MainDAO.getInstance().getFileSystemDAO().isSystemContentInvalidating(getParameters(), content);
@Override
public boolean isRefreshRequired(DAOEvent evt) {
return getDAO().isSystemContentInvalidating(this.getParameters(), evt);
}
}
@@ -331,16 +604,18 @@ public class FileSystemDAO {
super(params);
}
@Override
public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException {
return MainDAO.getInstance().getFileSystemDAO().getContentForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh);
protected FileSystemDAO getDAO() {
return MainDAO.getInstance().getFileSystemDAO();
}
@Override
public boolean isRefreshRequired(PropertyChangeEvent evt) {
// TODO implement the method for determining if
// a refresh is needed.
return false;
public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException {
return getDAO().getContentForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize);
}
@Override
public boolean isRefreshRequired(DAOEvent evt) {
return getDAO().isSystemHostInvalidating(this.getParameters(), evt);
}
}
}
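A short usage sketch of the new tree methods (dataSourceObjId is assumed; ExecutionException handling omitted):
// Illustration only: fetch the tree children beneath a data source node.
FileSystemDAO fsDao = MainDAO.getInstance().getFileSystemDAO();
TreeResultsDTO<FileSystemContentSearchParam> children = fsDao.getDisplayableContentChildren(dataSourceObjId);
// Each TreeItemDTO carries the FileSystemContentSearchParam that a DAOFetcher
// later uses to page the node's table rows when the node is selected.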

View File

@@ -25,51 +25,6 @@ import java.util.Objects;
*/
public class FileTypeSizeSearchParams {
public enum FileSizeFilter {
SIZE_50_200(0, "SIZE_50_200", "50 - 200MB", 50_000_000L, 200_000_000L), //NON-NLS
SIZE_200_1000(1, "SIZE_200_1GB", "200MB - 1GB", 200_000_000L, 1_000_000_000L), //NON-NLS
SIZE_1000_(2, "SIZE_1000+", "1GB+", 1_000_000_000L, null); //NON-NLS
private final int id;
private final String name;
private final String displayName;
private long minBound;
private Long maxBound;
private FileSizeFilter(int id, String name, String displayName, long minBound, Long maxBound) {
this.id = id;
this.name = name;
this.displayName = displayName;
this.minBound = minBound;
this.maxBound = maxBound;
}
public String getName() {
return this.name;
}
public int getId() {
return this.id;
}
public String getDisplayName() {
return this.displayName;
}
/**
* @return The minimum inclusive bound (non-null).
*/
public long getMinBound() {
return minBound;
}
/**
* @return The maximum exclusive bound (if null, no upper limit).
*/
public Long getMaxBound() {
return maxBound;
}
}
private final FileSizeFilter sizeFilter;
private final Long dataSourceId;

View File

@@ -18,22 +18,149 @@
*/
package org.sleuthkit.autopsy.mainui.datamodel;
import org.sleuthkit.autopsy.mainui.datamodel.events.DAOAggregateEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventBatcher;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.util.Collection;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.prefs.PreferenceChangeListener;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.collections4.CollectionUtils;
import org.python.google.common.collect.ImmutableSet;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.core.UserPreferences;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent;
/**
* Main entry point for the DAOs that provide the data used to populate the
* data results viewers.
*/
public class MainDAO {
public class MainDAO extends AbstractDAO {
private static final Logger logger = Logger.getLogger(MainDAO.class.getName());
private static final Set<IngestManager.IngestJobEvent> INGEST_JOB_EVENTS = EnumSet.of(
IngestManager.IngestJobEvent.COMPLETED,
IngestManager.IngestJobEvent.CANCELLED
);
private static final Set<IngestManager.IngestModuleEvent> INGEST_MODULE_EVENTS = EnumSet.of(
IngestManager.IngestModuleEvent.CONTENT_CHANGED,
IngestManager.IngestModuleEvent.DATA_ADDED,
IngestManager.IngestModuleEvent.FILE_DONE
);
private static final Set<String> QUEUED_CASE_EVENTS = ImmutableSet.of(
Case.Events.OS_ACCOUNTS_ADDED.toString(),
Case.Events.OS_ACCOUNTS_UPDATED.toString(),
Case.Events.OS_ACCOUNTS_DELETED.toString(),
Case.Events.OS_ACCT_INSTANCES_ADDED.toString()
);
private static final long WATCH_RESOLUTION_MILLIS = 30 * 1000;
private static final long RESULT_BATCH_MILLIS = 5 * 1000;
private static MainDAO instance = null;
public synchronized static MainDAO getInstance() {
if (instance == null) {
instance = new MainDAO();
instance.init();
}
return instance;
}
/**
* The case event listener.
*/
private final PropertyChangeListener caseEventListener = (evt) -> {
try {
if (evt.getPropertyName().equals(Case.Events.CURRENT_CASE.toString())) {
this.clearCaches();
} else if (QUEUED_CASE_EVENTS.contains(evt.getPropertyName())) {
handleEvent(evt, false);
} else {
// handle case events immediately
handleEvent(evt, true);
}
} catch (Throwable ex) {
// firewall exception
logger.log(Level.WARNING, "An exception occurred while handling case events", ex);
}
};
/**
* The user preference listener.
*/
private final PreferenceChangeListener userPreferenceListener = (evt) -> {
try {
this.clearCaches();
} catch (Throwable ex) {
// firewall exception
logger.log(Level.WARNING, "An exception occurred while handling user preference change", ex);
}
};
/**
* The ingest module event listener.
*/
private final PropertyChangeListener ingestModuleEventListener = (evt) -> {
try {
handleEvent(evt, false);
} catch (Throwable ex) {
// firewall exception
logger.log(Level.WARNING, "An exception occurred while handling ingest module event", ex);
}
};
/**
* The ingest job event listener.
*/
private final PropertyChangeListener ingestJobEventListener = (evt) -> {
try {
handleEventFlush();
} catch (Throwable ex) {
// firewall exception
logger.log(Level.WARNING, "An exception occurred while handling ingest job event", ex);
}
};
private final ScheduledThreadPoolExecutor timeoutExecutor
= new ScheduledThreadPoolExecutor(1,
new ThreadFactoryBuilder().setNameFormat(MainDAO.class.getName()).build());
private final PropertyChangeManager resultEventsManager = new PropertyChangeManager();
private final PropertyChangeManager treeEventsManager = new PropertyChangeManager();
private final DAOEventBatcher<DAOEvent> eventBatcher = new DAOEventBatcher<>(
(evts) -> {
try {
fireResultEvts(evts);
} catch (Throwable ex) {
// firewall exception
logger.log(Level.WARNING, "An exception occurred while handling batched dao events", ex);
}
},
RESULT_BATCH_MILLIS);
private final DataArtifactDAO dataArtifactDAO = DataArtifactDAO.getInstance();
private final AnalysisResultDAO analysisResultDAO = AnalysisResultDAO.getInstance();
private final ViewsDAO viewsDAO = ViewsDAO.getInstance();
@@ -42,10 +169,54 @@ public class MainDAO {
private final OsAccountsDAO osAccountsDAO = OsAccountsDAO.getInstance();
private final CommAccountsDAO commAccountsDAO = CommAccountsDAO.getInstance();
// NOTE: whenever adding a new sub-dao, it should be added to this list for event updates.
private final List<AbstractDAO> allDAOs = ImmutableList.of(
dataArtifactDAO,
analysisResultDAO,
viewsDAO,
fileSystemDAO,
tagsDAO,
osAccountsDAO,
commAccountsDAO);
/**
* Registers listeners with autopsy event publishers and starts internal
* threads.
*/
void init() {
IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS, ingestModuleEventListener);
IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS, ingestJobEventListener);
Case.addPropertyChangeListener(caseEventListener);
UserPreferences.addChangeListener(userPreferenceListener);
this.timeoutExecutor.scheduleAtFixedRate(
() -> {
try {
handleTreeEventTimeouts();
} catch (Throwable ex) {
// firewall exception
logger.log(Level.WARNING, "An exception occurred while handling tree event timeouts", ex);
}
},
WATCH_RESOLUTION_MILLIS,
WATCH_RESOLUTION_MILLIS,
TimeUnit.MILLISECONDS);
}
/**
* Unregisters listeners from autopsy event publishers.
*/
void unregister() {
IngestManager.getInstance().removeIngestModuleEventListener(INGEST_MODULE_EVENTS, ingestModuleEventListener);
IngestManager.getInstance().removeIngestJobEventListener(INGEST_JOB_EVENTS, ingestJobEventListener);
Case.removePropertyChangeListener(caseEventListener);
UserPreferences.removeChangeListener(userPreferenceListener);
}
public DataArtifactDAO getDataArtifactsDAO() {
return dataArtifactDAO;
}
public AnalysisResultDAO getAnalysisResultDAO() {
return analysisResultDAO;
}
@@ -53,20 +224,145 @@ public class MainDAO {
public ViewsDAO getViewsDAO() {
return viewsDAO;
}
public FileSystemDAO getFileSystemDAO() {
return fileSystemDAO;
}
public TagsDAO getTagsDAO() {
return tagsDAO;
}
public OsAccountsDAO getOsAccountsDAO() {
return osAccountsDAO;
}
public CommAccountsDAO getCommAccountsDAO() {
return commAccountsDAO;
}
public PropertyChangeManager getResultEventsManager() {
return this.resultEventsManager;
}
public PropertyChangeManager getTreeEventsManager() {
return treeEventsManager;
}
@Override
void clearCaches() {
allDAOs.forEach((subDAO) -> subDAO.clearCaches());
}
@Override
Set<DAOEvent> processEvent(PropertyChangeEvent evt) {
return allDAOs.stream()
.map(subDAO -> subDAO.processEvent(evt))
.flatMap(evts -> evts == null ? Stream.empty() : evts.stream())
.collect(Collectors.toSet());
}
@Override
Set<TreeEvent> shouldRefreshTree() {
return allDAOs.stream()
.map((subDAO) -> subDAO.shouldRefreshTree())
.flatMap(evts -> evts == null ? Stream.empty() : evts.stream())
.collect(Collectors.toSet());
}
@Override
Set<DAOEvent> handleIngestComplete() {
List<Collection<? extends DAOEvent>> daoStreamEvts = allDAOs.stream()
.map((subDAO) -> subDAO.handleIngestComplete())
.collect(Collectors.toList());
daoStreamEvts.add(eventBatcher.flushEvents());
return daoStreamEvts.stream()
.flatMap(evts -> evts == null ? Stream.empty() : evts.stream())
.collect(Collectors.toSet());
}
/**
* Processes and handles an autopsy event.
*
* @param evt The event.
* @param immediateResultAction If true, result events are immediately
* fired. Otherwise, the result events are
* batched.
*/
private void handleEvent(PropertyChangeEvent evt, boolean immediateResultAction) {
Collection<DAOEvent> daoEvts = processEvent(evt);
Map<DAOEvent.Type, Set<DAOEvent>> daoEvtsByType = daoEvts.stream()
.collect(Collectors.groupingBy(e -> e.getType(), Collectors.toSet()));
fireTreeEvts(daoEvtsByType.get(DAOEvent.Type.TREE));
Set<DAOEvent> resultEvts = daoEvtsByType.get(DAOEvent.Type.RESULT);
if (immediateResultAction) {
fireResultEvts(resultEvts);
} else {
eventBatcher.enqueueAllEvents(resultEvts);
}
}
private void handleEventFlush() {
Collection<DAOEvent> daoEvts = handleIngestComplete();
Map<DAOEvent.Type, Set<DAOEvent>> daoEvtsByType = daoEvts.stream()
.collect(Collectors.groupingBy(e -> e.getType(), Collectors.toSet()));
fireTreeEvts(daoEvtsByType.get(DAOEvent.Type.TREE));
Set<DAOEvent> resultEvts = daoEvtsByType.get(DAOEvent.Type.RESULT);
fireResultEvts(resultEvts);
}
private void fireResultEvts(Set<DAOEvent> resultEvts) {
if (CollectionUtils.isNotEmpty(resultEvts)) {
resultEventsManager.firePropertyChange("DATA_CHANGE", null, new DAOAggregateEvent(resultEvts));
}
}
private void fireTreeEvts(Set<? extends DAOEvent> treeEvts) {
if (CollectionUtils.isNotEmpty(treeEvts)) {
treeEventsManager.firePropertyChange("TREE_CHANGE", null, new DAOAggregateEvent(treeEvts));
}
}
private void handleTreeEventTimeouts() {
fireTreeEvts(this.shouldRefreshTree());
}
@Override
protected void finalize() throws Throwable {
unregister();
}
/**
* A wrapper around property change support that exposes
* addPropertyChangeListener and removePropertyChangeListener so that
* netbeans weak listeners can automatically unregister.
*/
public static class PropertyChangeManager {
private final PropertyChangeSupport support = new PropertyChangeSupport(this);
public void addPropertyChangeListener(PropertyChangeListener listener) {
support.addPropertyChangeListener(listener);
}
public void removePropertyChangeListener(PropertyChangeListener listener) {
support.removePropertyChangeListener(listener);
}
PropertyChangeListener[] getPropertyChangeListeners() {
return support.getPropertyChangeListeners();
}
void firePropertyChange(String propertyName, Object oldValue, Object newValue) {
support.firePropertyChange(propertyName, oldValue, newValue);
}
}
}
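Because PropertyChangeManager exposes the standard addPropertyChangeListener/removePropertyChangeListener pair, consumers can subscribe through NetBeans weak listeners. A minimal sketch (the listener body is hypothetical; a real component must keep a strong reference to the listener so it is not collected prematurely):
// Illustration only: subscribe to batched result events; the weak listener
// unregisters automatically once the strong reference is garbage collected.
PropertyChangeListener listener = (evt) -> {
    // evt.getNewValue() carries the DAOAggregateEvent of batched DAO events;
    // a real component would refresh its results view here.
};
MainDAO.PropertyChangeManager manager = MainDAO.getInstance().getResultEventsManager();
manager.addPropertyChangeListener(WeakListeners.propertyChange(listener, manager));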

View File

@@ -0,0 +1,102 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel;
import org.apache.commons.lang3.StringUtils;
import org.sleuthkit.autopsy.datamodel.FileTypeExtensions;
/**
* Utilities for mapping file extensions to media types and icon file paths.
*/
public class MediaTypeUtils {
public enum ExtensionMediaType {
IMAGE, VIDEO, AUDIO, DOC, EXECUTABLE, TEXT, WEB, PDF, ARCHIVE, UNCATEGORIZED
}
public static ExtensionMediaType getExtensionMediaType(String ext) {
if (StringUtils.isBlank(ext)) {
return ExtensionMediaType.UNCATEGORIZED;
} else {
ext = "." + ext;
}
if (FileTypeExtensions.getImageExtensions().contains(ext)) {
return ExtensionMediaType.IMAGE;
} else if (FileTypeExtensions.getVideoExtensions().contains(ext)) {
return ExtensionMediaType.VIDEO;
} else if (FileTypeExtensions.getAudioExtensions().contains(ext)) {
return ExtensionMediaType.AUDIO;
} else if (FileTypeExtensions.getDocumentExtensions().contains(ext)) {
return ExtensionMediaType.DOC;
} else if (FileTypeExtensions.getExecutableExtensions().contains(ext)) {
return ExtensionMediaType.EXECUTABLE;
} else if (FileTypeExtensions.getTextExtensions().contains(ext)) {
return ExtensionMediaType.TEXT;
} else if (FileTypeExtensions.getWebExtensions().contains(ext)) {
return ExtensionMediaType.WEB;
} else if (FileTypeExtensions.getPDFExtensions().contains(ext)) {
return ExtensionMediaType.PDF;
} else if (FileTypeExtensions.getArchiveExtensions().contains(ext)) {
return ExtensionMediaType.ARCHIVE;
} else {
return ExtensionMediaType.UNCATEGORIZED;
}
}
/**
* Gets the path to the icon file that should be used to visually represent
* a file, based on the media type derived from its file name extension.
*
* @param fileType The extension media type of the file.
*
* @return An icon file path.
*/
public static String getIconForFileType(ExtensionMediaType fileType) {
if (fileType == null) {
return "org/sleuthkit/autopsy/images/file-icon.png";
}
switch (fileType) {
case IMAGE:
return "org/sleuthkit/autopsy/images/image-file.png";
case VIDEO:
return "org/sleuthkit/autopsy/images/video-file.png";
case AUDIO:
return "org/sleuthkit/autopsy/images/audio-file.png";
case DOC:
return "org/sleuthkit/autopsy/images/doc-file.png";
case EXECUTABLE:
return "org/sleuthkit/autopsy/images/exe-file.png";
case TEXT:
return "org/sleuthkit/autopsy/images/text-file.png";
case WEB:
return "org/sleuthkit/autopsy/images/web-file.png";
case PDF:
return "org/sleuthkit/autopsy/images/pdf-file.png";
case ARCHIVE:
return "org/sleuthkit/autopsy/images/archive-file.png";
default:
case UNCATEGORIZED:
return "org/sleuthkit/autopsy/images/file-icon.png";
}
}
private MediaTypeUtils() {
}
}
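Illustrative usage (assuming ".png" is registered among the image extensions in FileTypeExtensions):
// Illustration only: derive the icon path for a file from its extension.
MediaTypeUtils.ExtensionMediaType type = MediaTypeUtils.getExtensionMediaType("png");
String iconPath = MediaTypeUtils.getIconForFileType(type); // the image-file.png path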

View File

@@ -18,24 +18,31 @@
*/
package org.sleuthkit.autopsy.mainui.datamodel;
import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.beans.PropertyChangeEvent;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.python.google.common.collect.ImmutableSet;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.TimeZoneUtils;
import org.sleuthkit.autopsy.mainui.datamodel.events.OsAccountEvent;
import org.sleuthkit.autopsy.mainui.datamodel.ContentRowDTO.OsAccountRowDTO;
import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent;
import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher;
import org.sleuthkit.datamodel.OsAccount;
import org.sleuthkit.datamodel.TskCoreException;
@@ -55,7 +62,7 @@ import org.sleuthkit.datamodel.TskCoreException;
"OsAccountsDAO.createSheet.comment.displayName=C",
"OsAccountsDAO.createSheet.count.displayName=O",
"OsAccountsDAO.fileColumns.noDescription=No Description",})
public class OsAccountsDAO {
public class OsAccountsDAO extends AbstractDAO {
private static final int CACHE_SIZE = 5; // rule of thumb: 5 entries times number of cached SearchParams sub-types
private static final long CACHE_DURATION = 2;
@@ -75,6 +82,13 @@ public class OsAccountsDAO {
getFileColumnKey(Bundle.OsAccountsDAO_accountRealmNameProperty_displayName()),
getFileColumnKey(Bundle.OsAccountsDAO_createdTimeProperty_displayName()));
private static final Set<String> OS_EVENTS = ImmutableSet.of(
Case.Events.OS_ACCOUNTS_ADDED.toString(),
Case.Events.OS_ACCOUNTS_DELETED.toString(),
Case.Events.OS_ACCOUNTS_UPDATED.toString(),
Case.Events.OS_ACCT_INSTANCES_ADDED.toString()
);
private static OsAccountsDAO instance = null;
synchronized static OsAccountsDAO getInstance() {
@@ -89,7 +103,7 @@ public class OsAccountsDAO {
return new ColumnKey(name, name, Bundle.OsAccountsDAO_fileColumns_noDescription());
}
public SearchResultsDTO getAccounts(OsAccountsSearchParams key, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException {
public SearchResultsDTO getAccounts(OsAccountsSearchParams key, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException {
if (key == null) {
throw new IllegalArgumentException("Search parameters are null");
} else if (key.getDataSourceId() != null && key.getDataSourceId() <= 0) {
@@ -97,13 +111,13 @@ public class OsAccountsDAO {
}
SearchParams<OsAccountsSearchParams> searchParams = new SearchParams<>(key, startItem, maxCount);
if (hardRefresh) {
this.searchParamsCache.invalidate(searchParams);
}
return searchParamsCache.get(searchParams, () -> fetchAccountsDTOs(searchParams));
}
private boolean isOSAccountInvalidatingEvt(OsAccountsSearchParams searchParams, DAOEvent evt) {
return evt instanceof OsAccountEvent;
}
/**
* Returns a list of paged OS Accounts results.
*
@@ -163,7 +177,35 @@ public class OsAccountsDAO {
cellValues));
};
return new BaseSearchResultsDTO(OS_ACCOUNTS_TYPE_ID, Bundle.OsAccounts_name_text(), OS_ACCOUNTS_WITH_SCO_COLUMNS, fileRows, 0, allAccounts.size());
return new BaseSearchResultsDTO(OS_ACCOUNTS_TYPE_ID, Bundle.OsAccounts_name_text(), OS_ACCOUNTS_WITH_SCO_COLUMNS, fileRows, OS_ACCOUNTS_TYPE_ID, 0, allAccounts.size());
}
@Override
void clearCaches() {
this.searchParamsCache.invalidateAll();
}
@Override
Set<DAOEvent> handleIngestComplete() {
// GVDTODO
return Collections.emptySet();
}
@Override
Set<TreeEvent> shouldRefreshTree() {
// GVDTODO
return Collections.emptySet();
}
@Override
Set<DAOEvent> processEvent(PropertyChangeEvent evt) {
if (!OS_EVENTS.contains(evt.getPropertyName())) {
return Collections.emptySet();
}
this.searchParamsCache.invalidateAll();
return Collections.singleton(new OsAccountEvent());
}
/**
@@ -180,19 +222,18 @@ public class OsAccountsDAO {
super(params);
}
@Override
public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException {
return MainDAO.getInstance().getOsAccountsDAO().getAccounts(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh);
protected OsAccountsDAO getDAO() {
return MainDAO.getInstance().getOsAccountsDAO();
}
@Override
public boolean isRefreshRequired(PropertyChangeEvent evt) {
String eventType = evt.getPropertyName();
if (eventType.equals(Case.Events.OS_ACCOUNTS_ADDED.toString())
|| eventType.equals(Case.Events.OS_ACCOUNTS_DELETED.toString())) {
return true;
}
return false;
public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException {
return getDAO().getAccounts(this.getParameters(), pageIdx * pageSize, (long) pageSize);
}
@Override
public boolean isRefreshRequired(DAOEvent evt) {
return getDAO().isOSAccountInvalidatingEvt(this.getParameters(), evt);
}
}
}

View File

@@ -21,20 +21,28 @@ package org.sleuthkit.autopsy.mainui.datamodel;
import java.util.List;
/**
*
* @author gregd
* Interface for all search results that are used to display in the table/DataResultViewer area.
*/
public interface SearchResultsDTO {
// returns the type of data
String getTypeId();
// Returns a unique signature for the type of data. Used to keep track of custom column ordering.
String getSignature();
// Text to display at top of the table about the type of the results.
String getDisplayName();
// Sorted list of column headers. The RowDTO column values will be in the same order
List<ColumnKey> getColumns();
// Page-sized, sorted list of rows to display
List<RowDTO> getItems();
// total number of results (could be bigger than what is in the results)
long getTotalResultsCount();
// Index in the total results that this set/page starts at
long getStartItem();
}
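A hedged sketch of how a consumer of this interface might use the paging contract described above; the results variable and the summary string are illustrative, not part of this commit:
// Illustrative only: summarize the current page of a SearchResultsDTO.
long firstItem = results.getStartItem() + 1;
long lastItem = results.getStartItem() + results.getItems().size();
String pageSummary = "Showing " + firstItem + "-" + lastItem
        + " of " + results.getTotalResultsCount() + " results";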

View File

@@ -0,0 +1,110 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel;
import com.google.common.cache.Cache;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.apache.commons.lang3.tuple.Pair;
import org.sleuthkit.autopsy.mainui.datamodel.events.TreeCounts;
import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent;
/**
* Utilities for common actions in the sub DAOs.
*/
public class SubDAOUtils {
/**
* Using a digest of event information, clears keys in a cache that may be
* affected by events.
*
* @param cache The cache.
* @param getKeys Given a cache key, provides a pair of the relevant key in
* the data source mapping and the data source id (or null if no data
* source filtering applies).
* @param itemDataSourceMapping The event digest.
*/
static <T, K> void invalidateKeys(Cache<SearchParams<K>, ?> cache, Function<K, Pair<T, Long>> getKeys, Map<T, Set<Long>> itemDataSourceMapping) {
invalidateKeys(cache, getKeys, Collections.singletonList(itemDataSourceMapping));
}
/**
* Using a digest of event information, clears keys in a cache that may be
* affected by events.
*
* @param cache The cache.
* @param getKeys Given a cache key, provides a pair of the relevant key in
* the data source mapping and the data source id (or null if no data
* source filtering applies).
* @param itemDataSourceMapping The list of event digests.
*/
static <T, K> void invalidateKeys(Cache<SearchParams<K>, ?> cache, Function<K, Pair<T, Long>> getKeys, List<Map<T, Set<Long>>> itemDataSourceMapping) {
ConcurrentMap<SearchParams<K>, ?> concurrentMap = cache.asMap();
concurrentMap.forEach((k, v) -> {
Pair<T, Long> pairItems = getKeys.apply(k.getParamData());
T searchParamsKey = pairItems.getLeft();
Long searchParamsDsId = pairItems.getRight();
for (Map<T, Set<Long>> itemDsMapping : itemDataSourceMapping) {
Set<Long> dsIds = itemDsMapping.get(searchParamsKey);
if (dsIds != null && (searchParamsDsId == null || dsIds.contains(searchParamsDsId))) {
concurrentMap.remove(k);
}
}
});
}
/**
* Returns a set of tree events gathered from the TreeCounts instance after
* calling flushEvents.
*
* @param treeCounts The tree counts instance.
* @param converter The means of acquiring a tree item dto to be placed in
* the TreeEvent.
*
* @return The generated tree events.
*/
static <E, T> Set<TreeEvent> getIngestCompleteEvents(TreeCounts<E> treeCounts, Function<E, TreeResultsDTO.TreeItemDTO<T>> converter) {
return treeCounts.flushEvents().stream()
.map(daoEvt -> new TreeEvent(converter.apply(daoEvt), true))
.collect(Collectors.toSet());
}
/**
* Returns a set of tree events gathered from the TreeCounts instance after
* calling getEventTimeouts.
*
* @param treeCounts The tree counts instance.
* @param converter The means of acquiring a tree item dto to be placed in
* the TreeEvent.
*
* @return The generated tree events.
*/
static <E, T> Set<TreeEvent> getRefreshEvents(TreeCounts<E> treeCounts, Function<E, TreeResultsDTO.TreeItemDTO<T>> converter) {
return treeCounts.getEventTimeouts().stream()
.map(daoEvt -> new TreeEvent(converter.apply(daoEvt), true))
.collect(Collectors.toSet());
}
}
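A hedged usage sketch for the helpers above; SetSearchParams, its accessors, searchParamsCache, treeCounts, and createTreeItemDTO are hypothetical stand-ins for what a concrete sub-DAO would supply:
// Illustrative only: invalidate cached pages whose (set name, data source id)
// pair appears in an event digest.
Map<String, Set<Long>> digest = new HashMap<>();
digest.computeIfAbsent("Interesting Items", k -> new HashSet<>()).add(1L);
SubDAOUtils.invalidateKeys(
        searchParamsCache,
        (SetSearchParams p) -> Pair.of(p.getSetName(), p.getDataSourceId()),
        digest);

// Illustrative only: turn pending tree counts into refresh events using a
// hypothetical converter.
Set<TreeEvent> refreshEvents = SubDAOUtils.getRefreshEvents(
        treeCounts,
        pendingEvt -> createTreeItemDTO(pendingEvt));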

View File

@@ -18,18 +18,29 @@
*/
package org.sleuthkit.autopsy.mainui.datamodel;
import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.beans.PropertyChangeEvent;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.commons.lang3.tuple.Triple;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
@@ -40,7 +51,9 @@ import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent;
import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent;
import org.sleuthkit.autopsy.core.UserPreferences;
import org.sleuthkit.autopsy.coreutils.TimeZoneUtils;
import org.sleuthkit.autopsy.events.AutopsyEvent;
import org.sleuthkit.autopsy.mainui.datamodel.TagsSearchParams.TagType;
import org.sleuthkit.autopsy.mainui.datamodel.events.TagsEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent;
import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
@@ -52,7 +65,7 @@ import org.sleuthkit.datamodel.TskCoreException;
/**
* Provides information to populate the results viewer for data in the allTags
section.
* section.
*/
@Messages({"TagsDAO.fileColumns.nameColLbl=Name",
"TagsDAO.fileColumns.originalName=Original Name",
@@ -72,15 +85,15 @@ import org.sleuthkit.datamodel.TskCoreException;
"TagsDAO.tagColumns.typeColLbl=Result Type",
"TagsDAO.tagColumns.commentColLbl=Comment",
"TagsDAO.tagColumns.userNameColLbl=User Name"})
public class TagsDAO {
public class TagsDAO extends AbstractDAO {
private static final int CACHE_SIZE = 5; // rule of thumb: 5 entries times number of cached SearchParams sub-types
private static final long CACHE_DURATION = 2;
private static final TimeUnit CACHE_DURATION_UNITS = TimeUnit.MINUTES;
private final Cache<SearchParams<?>, SearchResultsDTO> searchParamsCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).expireAfterAccess(CACHE_DURATION, CACHE_DURATION_UNITS).build();
private static final TimeUnit CACHE_DURATION_UNITS = TimeUnit.MINUTES;
private final Cache<SearchParams<TagsSearchParams>, SearchResultsDTO> searchParamsCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).expireAfterAccess(CACHE_DURATION, CACHE_DURATION_UNITS).build();
private static final String USER_NAME_PROPERTY = "user.name"; //NON-NLS
private static final List<ColumnKey> FILE_TAG_COLUMNS = Arrays.asList(
getFileColumnKey(Bundle.TagsDAO_fileColumns_nameColLbl()),
getFileColumnKey(Bundle.TagsDAO_fileColumns_originalName()), // GVDTODO handle translation
@@ -115,8 +128,8 @@ public class TagsDAO {
private static ColumnKey getFileColumnKey(String name) {
return new ColumnKey(name, name, Bundle.TagsDAO_fileColumns_noDescription());
}
public SearchResultsDTO getTags(TagsSearchParams key, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException {
public SearchResultsDTO getTags(TagsSearchParams key, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException {
if (key.getTagName() == null) {
throw new IllegalArgumentException("Must have non-null tag name");
} else if (key.getDataSourceId() != null && key.getDataSourceId() <= 0) {
@@ -124,17 +137,13 @@ public class TagsDAO {
} else if (key.getTagType() == null) {
throw new IllegalArgumentException("Must have non-null tag type");
}
SearchParams<TagsSearchParams> searchParams = new SearchParams<>(key, startItem, maxCount);
if (hardRefresh) {
this.searchParamsCache.invalidate(searchParams);
}
SearchParams<TagsSearchParams> searchParams = new SearchParams<>(key, startItem, maxCount);
return searchParamsCache.get(searchParams, () -> fetchTagsDTOs(searchParams));
}
}
@NbBundle.Messages({"FileTag.name.text=File Tag",
"ResultTag.name.text=Result Tag"})
"ResultTag.name.text=Result Tag"})
private SearchResultsDTO fetchTagsDTOs(SearchParams<TagsSearchParams> cacheKey) throws NoCurrentCaseException, TskCoreException {
switch (cacheKey.getParamData().getTagType()) {
case FILE:
@@ -145,7 +154,7 @@ public class TagsDAO {
throw new IllegalArgumentException("Unsupported tag type");
}
}
/**
* Returns a list of paged tag results.
*
@@ -170,7 +179,7 @@ public class TagsDAO {
Long dataSourceId = cacheKey.getParamData().getDataSourceId();
TagName tagName = cacheKey.getParamData().getTagName();
// get all tag results
List<BlackboardArtifactTag> allTags = new ArrayList<>();
List<BlackboardArtifactTag> artifactTags = (dataSourceId != null && dataSourceId > 0)
@@ -186,21 +195,21 @@ public class TagsDAO {
} else {
allTags.addAll(artifactTags);
}
// get current page of tag results
List<? extends Tag> pagedTags = getPaged(allTags, cacheKey);
List<RowDTO> fileRows = new ArrayList<>();
for (Tag tag : pagedTags) {
BlackboardArtifactTag blackboardTag = (BlackboardArtifactTag) tag;
String name = blackboardTag.getContent().getName(); // As a backup.
try {
name = blackboardTag.getArtifact().getShortDescription();
} catch (TskCoreException ignore) {
// it's a WARNING, skip
}
String contentPath;
try {
contentPath = blackboardTag.getContent().getUniquePath();
@@ -221,14 +230,14 @@ public class TagsDAO {
blackboardTag.getId()));
}
return new BaseSearchResultsDTO(BlackboardArtifactTagsRowDTO.getTypeIdForClass(), Bundle.ResultTag_name_text(), RESULT_TAG_COLUMNS, fileRows, 0, allTags.size());
return new BaseSearchResultsDTO(BlackboardArtifactTagsRowDTO.getTypeIdForClass(), Bundle.ResultTag_name_text(), RESULT_TAG_COLUMNS, fileRows, BlackboardArtifactTag.class.getName(), 0, allTags.size());
}
private SearchResultsDTO fetchFileTags(SearchParams<TagsSearchParams> cacheKey) throws NoCurrentCaseException, TskCoreException {
Long dataSourceId = cacheKey.getParamData().getDataSourceId();
TagName tagName = cacheKey.getParamData().getTagName();
// get all tag results
List<ContentTag> allTags = new ArrayList<>();
List<ContentTag> contentTags = (dataSourceId != null && dataSourceId > 0)
@@ -244,10 +253,10 @@ public class TagsDAO {
} else {
allTags.addAll(contentTags);
}
// get current page of tag results
List<? extends Tag> pagedTags = getPaged(allTags, cacheKey);
List<RowDTO> fileRows = new ArrayList<>();
for (Tag tag : pagedTags) {
ContentTag contentTag = (ContentTag) tag;
@@ -274,9 +283,136 @@ public class TagsDAO {
file.getId()));
}
return new BaseSearchResultsDTO(ContentTagsRowDTO.getTypeIdForClass(), Bundle.FileTag_name_text(), FILE_TAG_COLUMNS, fileRows, 0, allTags.size());
return new BaseSearchResultsDTO(ContentTagsRowDTO.getTypeIdForClass(), Bundle.FileTag_name_text(), FILE_TAG_COLUMNS, fileRows, ContentTag.class.getName(), 0, allTags.size());
}
/**
* Returns true if the DAO event could have an impact on the given search
* params.
*
* @param tagParams The tag params.
* @param daoEvt The DAO event.
*
* @return True if the event could affect the results of the search params.
*/
private boolean isTagsInvalidatingEvent(TagsSearchParams tagParams, DAOEvent daoEvt) {
if (!(daoEvt instanceof TagsEvent)) {
return false;
}
TagsEvent tagEvt = (TagsEvent) daoEvt;
return (tagParams.getTagName().getId() == tagEvt.getTagNameId()
&& tagParams.getTagType().equals(tagEvt.getTagType())
&& (tagParams.getDataSourceId() == null
|| tagEvt.getDataSourceId() == null
// compare boxed Long values by content, not by reference
|| tagParams.getDataSourceId().equals(tagEvt.getDataSourceId())));
}
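// Hedged sketch (not part of this commit): invalidation behavior of the
// predicate above for cached params with tagNameId=7, type=FILE, and no
// data source filter; the params variable and ids are hypothetical.
isTagsInvalidatingEvent(params, new TagsEvent(TagType.FILE, 7L, 42L)); // true
isTagsInvalidatingEvent(params, new TagsEvent(TagType.RESULT, 7L, 42L)); // false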
@Override
void clearCaches() {
this.searchParamsCache.invalidateAll();
}
@Override
Set<DAOEvent> handleIngestComplete() {
// GVDTODO
return Collections.emptySet();
}
@Override
Set<TreeEvent> shouldRefreshTree() {
// GVDTODO
return Collections.emptySet();
}
@Override
Set<DAOEvent> processEvent(PropertyChangeEvent evt) {
// GVDTODO this may be rewritten simpler now that it isn't processing a list of events
Map<Pair<TagType, Long>, Set<Optional<Long>>> mapping = new HashMap<>();
// tag type, tag name id, data source id (or null if unknown)
Triple<TagType, Long, Long> data = getTagData(evt);
if (data != null) {
mapping.computeIfAbsent(Pair.of(data.getLeft(), data.getMiddle()), k -> new HashSet<>())
.add(Optional.ofNullable(data.getRight()));
}
// don't continue if no mapping entries
if (mapping.isEmpty()) {
return Collections.emptySet();
}
ConcurrentMap<SearchParams<TagsSearchParams>, SearchResultsDTO> concurrentMap = this.searchParamsCache.asMap();
concurrentMap.forEach((k, v) -> {
TagsSearchParams paramData = k.getParamData();
Set<Optional<Long>> affectedDataSources = mapping.get(Pair.of(paramData.getTagType(), paramData.getTagName().getId()));
// we only clear key if the tag name / type line up and either the parameters data source wasn't specified,
// there is a wild card data source for the event, or the data source is contained in the list of data sources
// affected by the event
if (affectedDataSources != null
&& (paramData.getDataSourceId() == null
|| affectedDataSources.contains(Optional.empty())
|| affectedDataSources.contains(Optional.of(paramData.getDataSourceId())))) {
concurrentMap.remove(k);
}
});
return mapping.entrySet().stream()
.flatMap(entry -> {
TagType tagType = entry.getKey().getLeft();
Long tagNameId = entry.getKey().getRight();
return entry.getValue().stream()
.map((dsIdOpt) -> new TagsEvent(tagType, tagNameId, dsIdOpt.orElse(null)));
})
.collect(Collectors.toSet());
}
/**
* Returns tag information from an event or null if no tag information is
* found.
*
* @param evt The autopsy event.
*
* @return A triple of tag type, tag name id, and data source id (the data
* source id may be null if it cannot be determined from the event).
*/
private Triple<TagType, Long, Long> getTagData(PropertyChangeEvent evt) {
if (evt instanceof BlackBoardArtifactTagAddedEvent) {
BlackBoardArtifactTagAddedEvent event = (BlackBoardArtifactTagAddedEvent) evt;
// ensure tag added event has a valid content id
if (event.getAddedTag() != null
&& event.getAddedTag().getContent() != null
&& event.getAddedTag().getArtifact() != null) {
return Triple.of(TagType.RESULT, event.getAddedTag().getName().getId(), event.getAddedTag().getArtifact().getDataSourceObjectID());
}
} else if (evt instanceof BlackBoardArtifactTagDeletedEvent) {
BlackBoardArtifactTagDeletedEvent event = (BlackBoardArtifactTagDeletedEvent) evt;
BlackBoardArtifactTagDeletedEvent.DeletedBlackboardArtifactTagInfo deletedTagInfo = event.getDeletedTagInfo();
if (deletedTagInfo != null) {
return Triple.of(TagType.RESULT, deletedTagInfo.getName().getId(), null);
}
} else if (evt instanceof ContentTagAddedEvent) {
ContentTagAddedEvent event = (ContentTagAddedEvent) evt;
// ensure tag added event has a valid content id
if (event.getAddedTag() != null && event.getAddedTag().getContent() != null) {
Content content = event.getAddedTag().getContent();
Long dsId = content instanceof AbstractFile ? ((AbstractFile) content).getDataSourceObjectId() : null;
return Triple.of(TagType.FILE, event.getAddedTag().getName().getId(), dsId);
}
} else if (evt instanceof ContentTagDeletedEvent) {
ContentTagDeletedEvent event = (ContentTagDeletedEvent) evt;
// ensure tag deleted event has a valid content id
ContentTagDeletedEvent.DeletedContentTagInfo deletedTagInfo = event.getDeletedTagInfo();
if (deletedTagInfo != null) {
return Triple.of(TagType.FILE, deletedTagInfo.getName().getId(), null);
}
}
return null;
}
/**
* Handles fetching and paging of data for allTags.
*/
@@ -291,78 +427,18 @@ public class TagsDAO {
super(params);
}
@Override
public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException {
return MainDAO.getInstance().getTagsDAO().getTags(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh);
protected TagsDAO getDAO() {
return MainDAO.getInstance().getTagsDAO();
}
@Override
public boolean isRefreshRequired(PropertyChangeEvent evt) {
TagsSearchParams params = this.getParameters();
String eventType = evt.getPropertyName();
// handle artifact/result tag changes
if (eventType.equals(Case.Events.BLACKBOARD_ARTIFACT_TAG_ADDED.toString())
|| eventType.equals(Case.Events.BLACKBOARD_ARTIFACT_TAG_DELETED.toString())) {
// ignore non-artifact/result tag changes
if (params.getTagType() != TagsSearchParams.TagType.RESULT) {
return false;
}
if (evt instanceof AutopsyEvent) {
if (evt instanceof BlackBoardArtifactTagAddedEvent) {
// An artifact associated with the current case has been tagged.
BlackBoardArtifactTagAddedEvent event = (BlackBoardArtifactTagAddedEvent) evt;
// ensure tag added event has a valid content id
if (event.getAddedTag() == null || event.getAddedTag().getContent() == null || event.getAddedTag().getArtifact() == null) {
return false;
}
return params.getTagName().getId() == event.getAddedTag().getId();
} else if (evt instanceof BlackBoardArtifactTagDeletedEvent) {
// A tag has been removed from an artifact associated with the current case.
BlackBoardArtifactTagDeletedEvent event = (BlackBoardArtifactTagDeletedEvent) evt;
// ensure tag deleted event has a valid content id
BlackBoardArtifactTagDeletedEvent.DeletedBlackboardArtifactTagInfo deletedTagInfo = event.getDeletedTagInfo();
if (deletedTagInfo == null) {
return false;
}
return params.getTagName().getId() == deletedTagInfo.getTagID();
}
}
}
// handle file/content tag changes
if (eventType.equals(Case.Events.CONTENT_TAG_ADDED.toString())
|| eventType.equals(Case.Events.CONTENT_TAG_DELETED.toString())) {
// ignore non-file/content tag changes
if (params.getTagType() != TagsSearchParams.TagType.FILE) {
return false;
}
public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException {
return getDAO().getTags(this.getParameters(), pageIdx * pageSize, (long) pageSize);
}
if (evt instanceof AutopsyEvent) {
if (evt instanceof ContentTagAddedEvent) {
// Content associated with the current case has been tagged.
ContentTagAddedEvent event = (ContentTagAddedEvent) evt;
// ensure tag added event has a valid content id
if (event.getAddedTag() == null || event.getAddedTag().getContent() == null) {
return false;
}
return params.getTagName().getId() == event.getAddedTag().getId();
} else if (evt instanceof ContentTagDeletedEvent) {
// A tag has been removed from content associated with the current case.
ContentTagDeletedEvent event = (ContentTagDeletedEvent) evt;
// ensure tag deleted event has a valid content id
ContentTagDeletedEvent.DeletedContentTagInfo deletedTagInfo = event.getDeletedTagInfo();
if (deletedTagInfo == null) {
return false;
}
return params.getTagName().getId() == deletedTagInfo.getTagID();
}
}
}
return false;
@Override
public boolean isRefreshRequired(DAOEvent evt) {
return getDAO().isTagsInvalidatingEvent(this.getParameters(), evt);
}
}
}

View File

@@ -19,6 +19,7 @@
package org.sleuthkit.autopsy.mainui.datamodel;
import java.util.List;
import java.util.Objects;
/**
* A list of items to display in the tree.
@@ -43,6 +44,85 @@ public class TreeResultsDTO<T> {
return items;
}
/**
* Captures the count to be displayed in the UI.
*/
public static class TreeDisplayCount {
public enum Type {
DETERMINATE,
INDETERMINATE,
NOT_SHOWN,
UNSPECIFIED
}
private final Type type;
private final long count;
public static final TreeDisplayCount INDETERMINATE = new TreeDisplayCount(Type.INDETERMINATE, -1);
public static final TreeDisplayCount NOT_SHOWN = new TreeDisplayCount(Type.NOT_SHOWN, -1);
public static final TreeDisplayCount UNSPECIFIED = new TreeDisplayCount(Type.UNSPECIFIED, -1);
public static TreeDisplayCount getDeterminate(long count) {
return new TreeDisplayCount(Type.DETERMINATE, count);
}
private TreeDisplayCount(Type type, long count) {
this.type = type;
this.count = count;
}
public Type getType() {
return type;
}
public long getCount() {
return count;
}
public String getDisplaySuffix() {
switch (this.type) {
case DETERMINATE:
return " (" + count + ")";
case INDETERMINATE:
return "...";
case NOT_SHOWN:
default:
return "";
}
}
@Override
public int hashCode() {
int hash = 5;
hash = 97 * hash + Objects.hashCode(this.type);
hash = 97 * hash + (int) (this.count ^ (this.count >>> 32));
return hash;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final TreeDisplayCount other = (TreeDisplayCount) obj;
if (this.count != other.count) {
return false;
}
if (this.type != other.type) {
return false;
}
return true;
}
}
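// Hedged sketch (not part of this commit): the display suffixes produced
// by the count types above.
TreeDisplayCount.getDeterminate(42).getDisplaySuffix(); // " (42)"
TreeDisplayCount.INDETERMINATE.getDisplaySuffix(); // "..."
TreeDisplayCount.NOT_SHOWN.getDisplaySuffix(); // ""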
/**
* A result providing a category and a count for that category. Equals and
* hashCode are based on id, type id, and type data.
@@ -51,29 +131,28 @@ public class TreeResultsDTO<T> {
private final String displayName;
private final String typeId;
private final Long count;
private final T typeData;
private final TreeDisplayCount count;
private final T searchParams;
private final Object id;
/**
* Main constructor.
*
* @param typeId The id of this item type.
* @param typeData Data for this particular row's type (i.e.
* BlackboardArtifact.Type for counts of a particular
* artifact type).
* @param id The id of this row. Can be any object that
* implements equals and hashCode.
* @param displayName The display name of this row.
* @param count The count of results for this row or null if not
* applicable.
* @param typeId The id of this item type.
* @param searchParams Search params for this tree item that can be used
* to display results.
* @param id The id of this row. Can be any object that
* implements equals and hashCode.
* @param displayName The display name of this row.
* @param count The count of results for this row or null if not
* applicable.
*/
public TreeItemDTO(String typeId, T typeData, Object id, String displayName, Long count) {
public TreeItemDTO(String typeId, T searchParams, Object id, String displayName, TreeDisplayCount count) {
this.typeId = typeId;
this.id = id;
this.displayName = displayName;
this.count = count;
this.typeData = typeData;
this.searchParams = searchParams;
}
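// Hedged sketch (not part of this commit): constructing a tree item for a
// file-extension category; the "FILE_EXT" type id and the filter variable
// are illustrative assumptions.
TreeItemDTO<FileTypeExtensionsSearchParams> item = new TreeItemDTO<>(
        "FILE_EXT",
        new FileTypeExtensionsSearchParams(filter, null),
        filter,
        filter.getDisplayName(),
        TreeDisplayCount.getDeterminate(10));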
/**
@@ -86,18 +165,17 @@ public class TreeResultsDTO<T> {
/**
* @return The count of results for this row or null if not applicable.
*/
public Long getCount() {
public TreeDisplayCount getDisplayCount() {
return count;
}
/**
*
* @return Data for this particular row's type (i.e.
* BlackboardArtifact.Type for counts of a particular artifact
* type).
* @return Search params for this tree item that can be used to display
* results.
*/
public T getTypeData() {
return typeData;
public T getSearchParams() {
return searchParams;
}
/**
@@ -114,7 +192,5 @@ public class TreeResultsDTO<T> {
public String getTypeId() {
return typeId;
}
}
}

View File

@@ -18,16 +18,22 @@
*/
package org.sleuthkit.autopsy.mainui.datamodel;
import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.beans.PropertyChangeEvent;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
@@ -35,18 +41,22 @@ import java.util.logging.Logger;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import static org.sleuthkit.autopsy.core.UserPreferences.hideKnownFilesInViewsTree;
import static org.sleuthkit.autopsy.core.UserPreferences.hideSlackFilesInViewsTree;
import org.sleuthkit.autopsy.datamodel.FileTypeExtensions;
import org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO.ExtensionMediaType;
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeDisplayCount;
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO;
import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventUtils;
import org.sleuthkit.autopsy.mainui.datamodel.events.FileTypeExtensionsEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.FileTypeMimeEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.FileTypeSizeEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent;
import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.CaseDbAccessManager.CaseDbPreparedStatement;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
@@ -55,7 +65,7 @@ import org.sleuthkit.datamodel.TskData;
* Provides information to populate the results viewer for data in the views
* section.
*/
public class ViewsDAO {
public class ViewsDAO extends AbstractDAO {
private static final Logger logger = Logger.getLogger(ViewsDAO.class.getName());
@@ -76,40 +86,11 @@ public class ViewsDAO {
return instance;
}
static ExtensionMediaType getExtensionMediaType(String ext) {
if (StringUtils.isBlank(ext)) {
return ExtensionMediaType.UNCATEGORIZED;
} else {
ext = "." + ext;
}
if (FileTypeExtensions.getImageExtensions().contains(ext)) {
return ExtensionMediaType.IMAGE;
} else if (FileTypeExtensions.getVideoExtensions().contains(ext)) {
return ExtensionMediaType.VIDEO;
} else if (FileTypeExtensions.getAudioExtensions().contains(ext)) {
return ExtensionMediaType.AUDIO;
} else if (FileTypeExtensions.getDocumentExtensions().contains(ext)) {
return ExtensionMediaType.DOC;
} else if (FileTypeExtensions.getExecutableExtensions().contains(ext)) {
return ExtensionMediaType.EXECUTABLE;
} else if (FileTypeExtensions.getTextExtensions().contains(ext)) {
return ExtensionMediaType.TEXT;
} else if (FileTypeExtensions.getWebExtensions().contains(ext)) {
return ExtensionMediaType.WEB;
} else if (FileTypeExtensions.getPDFExtensions().contains(ext)) {
return ExtensionMediaType.PDF;
} else if (FileTypeExtensions.getArchiveExtensions().contains(ext)) {
return ExtensionMediaType.ARCHIVE;
} else {
return ExtensionMediaType.UNCATEGORIZED;
}
}
private SleuthkitCase getCase() throws NoCurrentCaseException {
return Case.getCurrentCaseThrows().getSleuthkitCase();
}
public SearchResultsDTO getFilesByExtension(FileTypeExtensionsSearchParams key, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException {
public SearchResultsDTO getFilesByExtension(FileTypeExtensionsSearchParams key, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException {
if (key.getFilter() == null) {
throw new IllegalArgumentException("Must have non-null filter");
} else if (key.getDataSourceId() != null && key.getDataSourceId() <= 0) {
@@ -117,14 +98,10 @@ public class ViewsDAO {
}
SearchParams<FileTypeExtensionsSearchParams> searchParams = new SearchParams<>(key, startItem, maxCount);
if (hardRefresh) {
this.searchParamsCache.invalidate(searchParams);
}
return searchParamsCache.get(searchParams, () -> fetchExtensionSearchResultsDTOs(key.getFilter(), key.getDataSourceId(), startItem, maxCount));
}
public SearchResultsDTO getFilesByMime(FileTypeMimeSearchParams key, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException {
public SearchResultsDTO getFilesByMime(FileTypeMimeSearchParams key, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException {
if (key.getMimeType() == null) {
throw new IllegalArgumentException("Must have non-null filter");
} else if (key.getDataSourceId() != null && key.getDataSourceId() <= 0) {
@@ -132,14 +109,10 @@ public class ViewsDAO {
}
SearchParams<FileTypeMimeSearchParams> searchParams = new SearchParams<>(key, startItem, maxCount);
if (hardRefresh) {
this.searchParamsCache.invalidate(searchParams);
}
return searchParamsCache.get(searchParams, () -> fetchMimeSearchResultsDTOs(key.getMimeType(), key.getDataSourceId(), startItem, maxCount));
}
public SearchResultsDTO getFilesBySize(FileTypeSizeSearchParams key, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException {
public SearchResultsDTO getFilesBySize(FileTypeSizeSearchParams key, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException {
if (key.getSizeFilter() == null) {
throw new IllegalArgumentException("Must have non-null filter");
} else if (key.getDataSourceId() != null && key.getDataSourceId() <= 0) {
@@ -147,41 +120,38 @@ public class ViewsDAO {
}
SearchParams<FileTypeSizeSearchParams> searchParams = new SearchParams<>(key, startItem, maxCount);
if (hardRefresh) {
this.searchParamsCache.invalidate(searchParams);
}
return searchParamsCache.get(searchParams, () -> fetchSizeSearchResultsDTOs(key.getSizeFilter(), key.getDataSourceId(), startItem, maxCount));
}
public boolean isFilesByExtInvalidating(FileTypeExtensionsSearchParams key, Content eventData) {
if (!(eventData instanceof AbstractFile)) {
private boolean isFilesByExtInvalidating(FileTypeExtensionsSearchParams key, DAOEvent eventData) {
if (!(eventData instanceof FileTypeExtensionsEvent)) {
return false;
}
AbstractFile file = (AbstractFile) eventData;
String extension = "." + file.getNameExtension().toLowerCase();
return key.getFilter().getFilter().contains(extension);
FileTypeExtensionsEvent extEvt = (FileTypeExtensionsEvent) eventData;
String extension = extEvt.getExtension().toLowerCase();
return key.getFilter().getFilter().contains(extension)
&& (key.getDataSourceId() == null || key.getDataSourceId().equals(extEvt.getDataSourceId()));
}
public boolean isFilesByMimeInvalidating(FileTypeMimeSearchParams key, Content eventData) {
if (!(eventData instanceof AbstractFile)) {
private boolean isFilesByMimeInvalidating(FileTypeMimeSearchParams key, DAOEvent eventData) {
if (!(eventData instanceof FileTypeMimeEvent)) {
return false;
}
AbstractFile file = (AbstractFile) eventData;
String mimeType = file.getMIMEType();
return key.getMimeType().equalsIgnoreCase(mimeType);
FileTypeMimeEvent mimeEvt = (FileTypeMimeEvent) eventData;
return mimeEvt.getMimeType().startsWith(key.getMimeType())
&& (key.getDataSourceId() == null || Objects.equals(key.getDataSourceId(), mimeEvt.getDataSourceId()));
}
public boolean isFilesBySizeInvalidating(FileTypeSizeSearchParams key, Content eventData) {
if (!(eventData instanceof AbstractFile)) {
private boolean isFilesBySizeInvalidating(FileTypeSizeSearchParams key, DAOEvent eventData) {
if (!(eventData instanceof FileTypeSizeEvent)) {
return false;
}
long size = eventData.getSize();
return size >= key.getSizeFilter().getMinBound() && (key.getSizeFilter().getMaxBound() == null || size < key.getSizeFilter().getMaxBound());
FileTypeSizeEvent sizeEvt = (FileTypeSizeEvent) eventData;
return sizeEvt.getSizeFilter().equals(key.getSizeFilter())
&& (key.getDataSourceId() == null || Objects.equals(key.getDataSourceId(), sizeEvt.getDataSourceId()));
}
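// Hedged sketch (not part of this commit): with the prefix match above, a
// page cached for the top-level "application" MIME category is invalidated
// by an event for any of its subtypes; the values are illustrative.
FileTypeMimeSearchParams appParams = new FileTypeMimeSearchParams("application", null);
isFilesByMimeInvalidating(appParams, new FileTypeMimeEvent("application/zip", 1L)); // true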
/**
@@ -286,7 +256,7 @@
*
* @return The clause to be preceded with 'where' or 'and'.
*/
private static String getFileSizeClause(FileTypeSizeSearchParams.FileSizeFilter filter) {
private static String getFileSizeClause(FileSizeFilter filter) {
return filter.getMaxBound() == null
? "(size >= " + filter.getMinBound() + ")"
: "(size >= " + filter.getMinBound() + " AND size < " + filter.getMaxBound() + ")";
@@ -314,7 +284,7 @@
*
* @return The clause to be preceded with 'where' or 'and'.
*/
private String getFileSizesWhereStatement(FileTypeSizeSearchParams.FileSizeFilter filter, Long dataSourceId) {
private String getFileSizesWhereStatement(FileSizeFilter filter, Long dataSourceId) {
String query = getBaseFileSizeFilter()
+ " AND " + getFileSizeClause(filter)
+ getDataSourceAndClause(dataSourceId);
@@ -350,7 +320,7 @@
new FileTypeExtensionsSearchParams(entry.getKey(), dataSourceId),
entry.getKey(),
entry.getKey().getDisplayName(),
entry.getValue());
TreeDisplayCount.getDeterminate(entry.getValue()));
})
.sorted((a, b) -> a.getDisplayName().compareToIgnoreCase(b.getDisplayName()))
.collect(Collectors.toList());
@@ -370,12 +340,12 @@
* @throws ExecutionException
*/
public TreeResultsDTO<FileTypeSizeSearchParams> getFileSizeCounts(Long dataSourceId) throws IllegalArgumentException, ExecutionException {
Map<FileTypeSizeSearchParams.FileSizeFilter, String> whereClauses = Stream.of(FileTypeSizeSearchParams.FileSizeFilter.values())
Map<FileSizeFilter, String> whereClauses = Stream.of(FileSizeFilter.values())
.collect(Collectors.toMap(
filter -> filter,
filter -> getFileSizeClause(filter)));
Map<FileTypeSizeSearchParams.FileSizeFilter, Long> countsByFilter = getFilesCounts(whereClauses, getBaseFileSizeFilter(), dataSourceId, true);
Map<FileSizeFilter, Long> countsByFilter = getFilesCounts(whereClauses, getBaseFileSizeFilter(), dataSourceId, true);
List<TreeItemDTO<FileTypeSizeSearchParams>> treeList = countsByFilter.entrySet().stream()
.map(entry -> {
@@ -384,7 +354,7 @@
new FileTypeSizeSearchParams(entry.getKey(), dataSourceId),
entry.getKey(),
entry.getKey().getDisplayName(),
entry.getValue());
TreeDisplayCount.getDeterminate(entry.getValue()));
})
.sorted((a, b) -> a.getDisplayName().compareToIgnoreCase(b.getDisplayName()))
.collect(Collectors.toList());
@@ -469,9 +439,9 @@ public class ViewsDAO {
new FileTypeMimeSearchParams(entry.getKey(), dataSourceId),
name,
name,
entry.getValue());
TreeDisplayCount.getDeterminate(entry.getValue()));
})
.sorted((a, b) -> stringCompare(a.getTypeData().getMimeType(), b.getTypeData().getMimeType()))
.sorted((a, b) -> stringCompare(a.getSearchParams().getMimeType(), b.getSearchParams().getMimeType()))
.collect(Collectors.toList());
return new TreeResultsDTO<>(treeList);
@@ -597,7 +567,7 @@ public class ViewsDAO {
return fetchFileViewFiles(whereStatement, MIME_TYPE_DISPLAY_NAME, startItem, maxResultCount);
}
private SearchResultsDTO fetchSizeSearchResultsDTOs(FileTypeSizeSearchParams.FileSizeFilter filter, Long dataSourceId, long startItem, Long maxResultCount) throws NoCurrentCaseException, TskCoreException {
private SearchResultsDTO fetchSizeSearchResultsDTOs(FileSizeFilter filter, Long dataSourceId, long startItem, Long maxResultCount) throws NoCurrentCaseException, TskCoreException {
String whereStatement = getFileSizesWhereStatement(filter, dataSourceId);
return fetchFileViewFiles(whereStatement, filter.getDisplayName(), startItem, maxResultCount);
}
@@ -633,13 +603,188 @@ public class ViewsDAO {
file.getId(),
file.getName(),
file.getNameExtension(),
getExtensionMediaType(file.getNameExtension()),
MediaTypeUtils.getExtensionMediaType(file.getNameExtension()),
file.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.ALLOC),
file.getType(),
cellValues));
}
return new BaseSearchResultsDTO(FILE_VIEW_EXT_TYPE_ID, displayName, FileSystemColumnUtils.getColumnKeysForAbstractfile(), fileRows, startItem, totalResultsCount);
return new BaseSearchResultsDTO(FILE_VIEW_EXT_TYPE_ID, displayName, FileSystemColumnUtils.getColumnKeysForAbstractfile(), fileRows, AbstractFile.class.getName(), startItem, totalResultsCount);
}
@Override
void clearCaches() {
this.searchParamsCache.invalidateAll();
}
private Pair<String, String> getMimePieces(String mimeType) {
int idx = mimeType.indexOf("/");
String mimePrefix = idx > 0 ? mimeType.substring(0, idx) : mimeType;
String mimeSuffix = idx > 0 ? mimeType.substring(idx + 1) : null;
return Pair.of(mimePrefix, mimeSuffix);
}
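// Hedged sketch (not part of this commit): how getMimePieces splits a
// MIME type string.
getMimePieces("application/zip"); // Pair.of("application", "zip")
getMimePieces("application"); // Pair.of("application", null)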
@Override
Set<DAOEvent> handleIngestComplete() {
// GVDTODO
return Collections.emptySet();
}
@Override
Set<TreeEvent> shouldRefreshTree() {
// GVDTODO
return Collections.emptySet();
}
@Override
Set<DAOEvent> processEvent(PropertyChangeEvent evt) {
// GVDTODO maps may not be necessary now that this isn't processing a list of events.
Map<String, Set<Long>> fileExtensionDsMap = new HashMap<>();
Map<String, Map<String, Set<Long>>> mimeTypeDsMap = new HashMap<>();
Map<FileSizeFilter, Set<Long>> fileSizeDsMap = new HashMap<>();
AbstractFile af = DAOEventUtils.getFileFromFileEvent(evt);
if (af == null) {
return Collections.emptySet();
}
// create an extension mapping if extension present
if (!StringUtils.isBlank(af.getNameExtension())) {
fileExtensionDsMap
.computeIfAbsent("." + af.getNameExtension(), (k) -> new HashSet<>())
.add(af.getDataSourceObjectId());
}
// create a mime type mapping if mime type present
if (!StringUtils.isBlank(af.getMIMEType())) {
Pair<String, String> mimePieces = getMimePieces(af.getMIMEType());
mimeTypeDsMap
.computeIfAbsent(mimePieces.getKey(), (k) -> new HashMap<>())
.computeIfAbsent(mimePieces.getValue(), (k) -> new HashSet<>())
.add(af.getDataSourceObjectId());
}
// create a size mapping if size present
FileSizeFilter sizeFilter = Stream.of(FileSizeFilter.values())
.filter(filter -> af.getSize() >= filter.getMinBound() && (filter.getMaxBound() == null || af.getSize() < filter.getMaxBound()))
.findFirst()
.orElse(null);
if (sizeFilter != null) {
fileSizeDsMap
.computeIfAbsent(sizeFilter, (k) -> new HashSet<>())
.add(af.getDataSourceObjectId());
}
if (fileExtensionDsMap.isEmpty() && mimeTypeDsMap.isEmpty() && fileSizeDsMap.isEmpty()) {
return Collections.emptySet();
}
clearRelevantCacheEntries(fileExtensionDsMap, mimeTypeDsMap, fileSizeDsMap);
return getDAOEvents(fileExtensionDsMap, mimeTypeDsMap, fileSizeDsMap);
}
/**
* Creates the DAO events implied by a digest of autopsy events.
*
* @param fileExtensionDsMap Maps the file extension to the data sources
* where files were found with that extension.
* @param mimeTypeDsMap Maps the mime type to the data sources where
* files were found with that mime type.
* @param fileSizeDsMap Maps the size filter to the data sources where
* files were found within that size filter.
*
* @return The set of affected DAO events.
*/
private Set<DAOEvent> getDAOEvents(Map<String, Set<Long>> fileExtensionDsMap,
Map<String, Map<String, Set<Long>>> mimeTypeDsMap,
Map<FileSizeFilter, Set<Long>> fileSizeDsMap) {
Stream<DAOEvent> fileExtStream = fileExtensionDsMap.entrySet().stream()
.flatMap(entry -> entry.getValue().stream().map(dsId -> new FileTypeExtensionsEvent(entry.getKey(), dsId)));
Set<DAOEvent> fileMimeList = new HashSet<>();
for (Entry<String, Map<String, Set<Long>>> prefixEntry : mimeTypeDsMap.entrySet()) {
String mimePrefix = prefixEntry.getKey();
for (Entry<String, Set<Long>> suffixEntry : prefixEntry.getValue().entrySet()) {
String mimeSuffix = suffixEntry.getKey();
for (long dsId : suffixEntry.getValue()) {
String mimeType = mimePrefix + (mimeSuffix == null ? "" : ("/" + mimeSuffix));
fileMimeList.add(new FileTypeMimeEvent(mimeType, dsId));
}
}
}
Stream<DAOEvent> fileSizeStream = fileSizeDsMap.entrySet().stream()
.flatMap(entry -> entry.getValue().stream().map(dsId -> new FileTypeSizeEvent(entry.getKey(), dsId)));
return Stream.of(fileExtStream, fileMimeList.stream(), fileSizeStream)
.flatMap(stream -> stream)
.collect(Collectors.toSet());
}
/**
* Clears relevant cache entries from cache based on digest of autopsy
* events.
*
* @param fileExtensionDsMap Maps the file extension to the data sources
* where files were found with that extension.
* @param mimeTypeDsMap Maps the mime type to the data sources where
* files were found with that mime type.
* @param fileSizeDsMap Maps the size to the data sources where files
* were found within that size filter.
*/
private void clearRelevantCacheEntries(Map<String, Set<Long>> fileExtensionDsMap,
Map<String, Map<String, Set<Long>>> mimeTypeDsMap,
Map<FileSizeFilter, Set<Long>> fileSizeDsMap) {
// invalidate cache entries that are affected by events
ConcurrentMap<SearchParams<?>, SearchResultsDTO> concurrentMap = this.searchParamsCache.asMap();
concurrentMap.forEach((k, v) -> {
Object baseParams = k.getParamData();
if (baseParams instanceof FileTypeExtensionsSearchParams) {
FileTypeExtensionsSearchParams extParams = (FileTypeExtensionsSearchParams) baseParams;
// if search params have a filter where extension is present and the data source id is null or ==
boolean isMatch = extParams.getFilter().getFilter().stream().anyMatch((ext) -> {
Set<Long> dsIds = fileExtensionDsMap.get(ext);
return (dsIds != null && (extParams.getDataSourceId() == null || dsIds.contains(extParams.getDataSourceId())));
});
if (isMatch) {
concurrentMap.remove(k);
}
} else if (baseParams instanceof FileTypeMimeSearchParams) {
FileTypeMimeSearchParams mimeParams = (FileTypeMimeSearchParams) baseParams;
Pair<String, String> mimePieces = getMimePieces(mimeParams.getMimeType());
Map<String, Set<Long>> suffixes = mimeTypeDsMap.get(mimePieces.getKey());
if (suffixes == null) {
return;
}
// if search params is top level mime prefix (without suffix) and data source is null or ==.
if (mimePieces.getValue() == null
&& (mimeParams.getDataSourceId() == null
|| suffixes.values().stream().flatMap(set -> set.stream()).anyMatch(ds -> Objects.equals(mimeParams.getDataSourceId(), ds)))) {
concurrentMap.remove(k);
// otherwise, see if suffix is present
} else {
Set<Long> dataSources = suffixes.get(mimePieces.getValue());
if (dataSources != null && (mimeParams.getDataSourceId() == null || dataSources.contains(mimeParams.getDataSourceId()))) {
concurrentMap.remove(k);
}
}
} else if (baseParams instanceof FileTypeSizeSearchParams) {
FileTypeSizeSearchParams sizeParams = (FileTypeSizeSearchParams) baseParams;
Set<Long> dataSources = fileSizeDsMap.get(sizeParams.getSizeFilter());
if (dataSources != null && (sizeParams.getDataSourceId() == null || dataSources.contains(sizeParams.getDataSourceId()))) {
concurrentMap.remove(k);
}
}
});
}
/**
@@ -656,19 +801,18 @@ public class ViewsDAO {
super(params);
}
@Override
public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException {
return MainDAO.getInstance().getViewsDAO().getFilesByExtension(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh);
protected ViewsDAO getDAO() {
return MainDAO.getInstance().getViewsDAO();
}
@Override
public boolean isRefreshRequired(PropertyChangeEvent evt) {
Content content = this.getContentFromEvt(evt);
if (content == null) {
return false;
}
public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException {
return getDAO().getFilesByExtension(this.getParameters(), pageIdx * pageSize, (long) pageSize);
}
return MainDAO.getInstance().getViewsDAO().isFilesByExtInvalidating(this.getParameters(), content);
@Override
public boolean isRefreshRequired(DAOEvent evt) {
return getDAO().isFilesByExtInvalidating(this.getParameters(), evt);
}
}
@@ -686,26 +830,25 @@ public class ViewsDAO {
super(params);
}
@Override
public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException {
return MainDAO.getInstance().getViewsDAO().getFilesByMime(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh);
protected ViewsDAO getDAO() {
return MainDAO.getInstance().getViewsDAO();
}
@Override
public boolean isRefreshRequired(PropertyChangeEvent evt) {
Content content = this.getContentFromEvt(evt);
if (content == null) {
return false;
}
public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException {
return getDAO().getFilesByMime(this.getParameters(), pageIdx * pageSize, (long) pageSize);
}
return MainDAO.getInstance().getViewsDAO().isFilesByMimeInvalidating(this.getParameters(), content);
@Override
public boolean isRefreshRequired(DAOEvent evt) {
return getDAO().isFilesByMimeInvalidating(this.getParameters(), evt);
}
}
/**
* Handles fetching and paging of data for file types by size.
*/
public static class FileTypeSizeFetcher extends DAOFetcher<FileTypeSizeSearchParams> {
public class FileTypeSizeFetcher extends DAOFetcher<FileTypeSizeSearchParams> {
/**
* Main constructor.
@@ -716,19 +859,18 @@ public class ViewsDAO {
super(params);
}
@Override
public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException {
return MainDAO.getInstance().getViewsDAO().getFilesBySize(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh);
protected ViewsDAO getDAO() {
return MainDAO.getInstance().getViewsDAO();
}
@Override
public boolean isRefreshRequired(PropertyChangeEvent evt) {
Content content = this.getContentFromEvt(evt);
if (content == null) {
return false;
}
public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException {
return getDAO().getFilesBySize(this.getParameters(), pageIdx * pageSize, (long) pageSize);
}
return MainDAO.getInstance().getViewsDAO().isFilesBySizeInvalidating(this.getParameters(), content);
@Override
public boolean isRefreshRequired(DAOEvent evt) {
return getDAO().isFilesBySizeInvalidating(this.getParameters(), evt);
}
}
}

View File

@@ -0,0 +1,33 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel.events;
import org.sleuthkit.datamodel.BlackboardArtifact;
/**
* An event signaling that an artifact of a particular type has been added or
* changed, possibly for a particular data source.
*/
public class AnalysisResultEvent extends BlackboardArtifactEvent {
public AnalysisResultEvent(BlackboardArtifact.Type artifactType, long dataSourceId) {
super(artifactType, dataSourceId);
}
}

View File

@@ -0,0 +1,38 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel.events;
import org.sleuthkit.datamodel.BlackboardArtifact;
/**
* An event for an analysis result that is organized by set name, signaling
* that one has been added or removed on a given data source.
*/
public class AnalysisResultSetEvent extends AnalysisResultEvent {
private final String setName;
public AnalysisResultSetEvent(String setName, BlackboardArtifact.Type artifactType, long dataSourceId) {
super(artifactType, dataSourceId);
this.setName = setName;
}
public String getSetName() {
return setName;
}
}

View File

@@ -0,0 +1,79 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel.events;
import java.util.Objects;
import org.sleuthkit.datamodel.BlackboardArtifact;
/**
* A base class for DataArtifact and AnalysisResult events to signal that one
* has been added or removed.
*/
public abstract class BlackboardArtifactEvent implements DAOEvent {
private final BlackboardArtifact.Type artifactType;
private final long dataSourceId;
BlackboardArtifactEvent(BlackboardArtifact.Type artifactType, long dataSourceId) {
this.artifactType = artifactType;
this.dataSourceId = dataSourceId;
}
public BlackboardArtifact.Type getArtifactType() {
return artifactType;
}
public long getDataSourceId() {
return dataSourceId;
}
@Override
public int hashCode() {
int hash = 5;
hash = 17 * hash + Objects.hashCode(this.artifactType);
hash = 17 * hash + (int) (this.dataSourceId ^ (this.dataSourceId >>> 32));
return hash;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final BlackboardArtifactEvent other = (BlackboardArtifactEvent) obj;
if (this.dataSourceId != other.dataSourceId) {
return false;
}
if (!Objects.equals(this.artifactType, other.artifactType)) {
return false;
}
return true;
}
@Override
public Type getType() {
return Type.RESULT;
}
}

View File

@@ -0,0 +1,76 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel.events;
import java.util.Objects;
import org.sleuthkit.datamodel.Account;
import org.sleuthkit.datamodel.BlackboardArtifact;
/**
* An event signaling that communication accounts of a particular type have
* been added or changed, possibly for a particular data source.
*/
public class CommAccountsEvent extends DataArtifactEvent {
private final Account.Type accountType;
/**
* Main constructor.
*
* @param accountType The account type identifier.
* @param dataSourceId The data source id to filter on or null.
*/
public CommAccountsEvent(Account.Type accountType, Long dataSourceId) {
super(BlackboardArtifact.Type.TSK_ACCOUNT, dataSourceId);
this.accountType = accountType;
}
/**
* @return The account type identifier.
*/
public Account.Type getAccountType() {
return accountType;
}
@Override
public int hashCode() {
int hash = 7;
hash = 29 * hash + Objects.hashCode(this.accountType);
hash = 29 * hash + super.hashCode();
return hash;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final CommAccountsEvent other = (CommAccountsEvent) obj;
if (!Objects.equals(this.accountType, other.accountType)) {
return false;
}
return super.equals(obj);
}
}

View File

@@ -0,0 +1,46 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel.events;
import java.util.Collections;
import java.util.Set;
/**
* A single event containing an aggregate of all affected data.
*/
public class DAOAggregateEvent {
private final Set<? extends DAOEvent> objects;
/**
* Main constructor.
*
* @param objects The list of events in this aggregate event.
*/
public DAOAggregateEvent(Set<? extends DAOEvent> objects) {
this.objects = Collections.unmodifiableSet(objects);
}
/**
* @return The events in this aggregate event.
*/
public Set<? extends DAOEvent> getEvents() {
return objects;
}
}

View File

@@ -0,0 +1,28 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel.events;
/**
* An event emitted by the DAO.
*/
public interface DAOEvent {
public enum Type { TREE, RESULT }
DAOEvent.Type getType();
}

View File

@@ -0,0 +1,123 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel.events;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import org.apache.commons.collections4.CollectionUtils;
/**
*
* Batches incoming DAO events over a time window so that downstream
* refreshes are throttled.
*/
public class DAOEventBatcher<T> {
/**
* Implemented by classes that wish to handle batches of aggregated events
* once a batching window closes.
*/
public interface BatchedEventsHandler<T> {
/**
* Handles a list of aggregated events.
*
* @param events The events to handle.
*/
void handle(Set<T> events);
}
private final ScheduledThreadPoolExecutor refreshExecutor
= new ScheduledThreadPoolExecutor(1,
new ThreadFactoryBuilder().setNameFormat(DAOEventBatcher.class.getName()).build());
private Set<T> aggregateEvents = new HashSet<>();
private final Object eventListLock = new Object();
private boolean isRunning = false;
private final BatchedEventsHandler<T> eventsHandler;
private final long batchMillis;
public DAOEventBatcher(BatchedEventsHandler<T> eventsHandler, long batchMillis) {
this.eventsHandler = eventsHandler;
this.batchMillis = batchMillis;
}
/**
* Queues an event to be fired as a part of a time-windowed batch.
*
* @param event The event.
*/
public void queueEvent(T event) {
synchronized (this.eventListLock) {
this.aggregateEvents.add(event);
verifyRunning();
}
}
/**
* Starts the throttled event runner if it is not currently running.
*/
private void verifyRunning() {
synchronized (this.eventListLock) {
if (!this.isRunning) {
refreshExecutor.schedule(() -> fireEvents(), this.batchMillis, TimeUnit.MILLISECONDS);
this.isRunning = true;
}
}
}
/**
* Queues a collection of events to be fired as a part of a time-windowed batch.
*
* @param events The events.
*/
public void enqueueAllEvents(Collection<T> events) {
if (CollectionUtils.isNotEmpty(events)) {
synchronized (this.eventListLock) {
this.aggregateEvents.addAll(events);
verifyRunning();
}
}
}
/**
* Flushes any currently batched events, emptying the queue in the process.
*
* @return The flushed events.
*/
public Set<T> flushEvents() {
synchronized (this.eventListLock) {
Set<T> evtsToFire = this.aggregateEvents;
this.aggregateEvents = new HashSet<>();
this.isRunning = false;
return evtsToFire;
}
}
/**
* Fires all events and clears batch.
*/
private void fireEvents() {
this.eventsHandler.handle(flushEvents());
}
}
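A minimal usage sketch of the batcher (the class name, String payloads, and the 5-second window are illustrative assumptions, not part of this commit):

import java.util.Set;

public class BatcherSketch {
    public static void main(String[] args) {
        // Batch String events into 5-second windows; the handler fires once per window.
        DAOEventBatcher<String> batcher = new DAOEventBatcher<>(
                (Set<String> events) -> System.out.println("handling batch: " + events),
                5000);
        batcher.queueEvent("eventA");
        batcher.queueEvent("eventB"); // coalesced into the same batch as eventA
    }
}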

View File

@@ -0,0 +1,108 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel.events;
import java.beans.PropertyChangeEvent;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.ingest.ModuleContentEvent;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
/**
* Utilities for handling Autopsy events in the DAO layer.
*/
public class DAOEventUtils {
/**
* Returns the file content from the event. If the event is not a file event
* or the event does not contain file content, null is returned.
*
* @param evt The event
*
* @return The inner content or null if no content.
*/
public static Content getContentFromFileEvent(PropertyChangeEvent evt) {
String eventName = evt.getPropertyName();
Content derivedContent = getDerivedFileContentFromFileEvent(evt);
if (derivedContent != null) {
return derivedContent;
} else if (IngestManager.IngestModuleEvent.FILE_DONE.toString().equals(eventName)
&& (evt.getNewValue() instanceof Content)) {
return (Content) evt.getNewValue();
} else {
return null;
}
}
/**
* Returns the content from the ModuleContentEvent. If the event does not
* contain a ModuleContentEvent or that event does not contain Content, null
* is returned.
*
* @param evt The event
*
* @return The inner content or null if no content.
*/
public static Content getDerivedFileContentFromFileEvent(PropertyChangeEvent evt) {
String eventName = evt.getPropertyName();
if (IngestManager.IngestModuleEvent.CONTENT_CHANGED.toString().equals(eventName)
&& (evt.getOldValue() instanceof ModuleContentEvent)
&& ((ModuleContentEvent) evt.getOldValue()).getSource() instanceof Content) {
return (Content) ((ModuleContentEvent) evt.getOldValue()).getSource();
} else {
return null;
}
}
/**
* Returns the file contained in the event, if one is present.
*
* @param evt The autopsy event.
*
* @return The inner file or null if no file found.
*/
public static AbstractFile getFileFromFileEvent(PropertyChangeEvent evt) {
Content content = getContentFromFileEvent(evt);
return (content instanceof AbstractFile)
? ((AbstractFile) content)
: null;
}
/**
* Returns the ModuleDataEvent in the event if there is a child
* ModuleDataEvent. If not, null is returned.
*
* @param evt The event.
*
* @return The inner ModuleDataEvent or null.
*/
public static ModuleDataEvent getModuelDataFromArtifactEvent(PropertyChangeEvent evt) {
String eventName = evt.getPropertyName();
if (IngestManager.IngestModuleEvent.DATA_ADDED.toString().equals(eventName)
&& (evt.getOldValue() instanceof ModuleDataEvent)) {
return (ModuleDataEvent) evt.getOldValue();
} else {
return null;
}
}
}
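A sketch of how these helpers might be used from a PropertyChangeListener registered for ingest module events (the listener itself is illustrative, not part of this commit):

import java.beans.PropertyChangeListener;
import org.sleuthkit.datamodel.AbstractFile;

public class FileEventListenerSketch {
    // Hypothetical listener that pulls the affected file out of an ingest module event.
    public static final PropertyChangeListener LISTENER = evt -> {
        AbstractFile file = DAOEventUtils.getFileFromFileEvent(evt);
        if (file != null) {
            System.out.println("file added or changed: " + file.getName());
        }
    };
}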

View File

@@ -0,0 +1,32 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel.events;
import org.sleuthkit.datamodel.BlackboardArtifact;
/**
* An event signaling that a data artifact of a particular type was added or
* changed, possibly for a particular data source.
*/
public class DataArtifactEvent extends BlackboardArtifactEvent {
public DataArtifactEvent(BlackboardArtifact.Type artifactType, long dataSourceId) {
super(artifactType, dataSourceId);
}
}

View File

@@ -0,0 +1,74 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel.events;
import java.util.Objects;
/**
* An event signaling that child files were added or removed under the given
* parent ID.
*/
public class FileSystemContentEvent implements DAOEvent {
private final Long contentObjectId;
/**
* Main constructor.
*
* @param contentObjectId The parent content object id. If null, performs
* full refresh of file tree.
*/
public FileSystemContentEvent(Long contentObjectId) {
this.contentObjectId = contentObjectId;
}
public Long getContentObjectId() {
return contentObjectId;
}
@Override
public int hashCode() {
int hash = 7;
hash = 67 * hash + Objects.hashCode(this.contentObjectId);
return hash;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final FileSystemContentEvent other = (FileSystemContentEvent) obj;
if (!Objects.equals(this.contentObjectId, other.contentObjectId)) {
return false;
}
return true;
}
@Override
public Type getType() {
return Type.RESULT;
}
}
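The value-style equals/hashCode above lets duplicate events coalesce when they are collected into a set, which is what DAOEventBatcher relies on when it aggregates. A small demonstration sketch (class name and object id are illustrative):

import java.util.HashSet;
import java.util.Set;

public class EventCoalescingSketch {
    public static void main(String[] args) {
        // Two events for the same parent object id are equal, so they collapse
        // to a single entry in a HashSet.
        Set<FileSystemContentEvent> pending = new HashSet<>();
        pending.add(new FileSystemContentEvent(42L));
        pending.add(new FileSystemContentEvent(42L));
        System.out.println(pending.size()); // prints 1
    }
}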

View File

@@ -0,0 +1,68 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel.events;
import java.util.Objects;
/**
* An event signaling that a data source has been added or removed from the
* given Host.
*/
public class FileSystemHostEvent implements DAOEvent {
private final Long hostObjectId;
public FileSystemHostEvent(Long hostObjectId) {
this.hostObjectId = hostObjectId;
}
public Long getHostObjectId() {
return hostObjectId;
}
@Override
public int hashCode() {
int hash = 7;
hash = 67 * hash + Objects.hashCode(this.hostObjectId);
return hash;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final FileSystemHostEvent other = (FileSystemHostEvent) obj;
if (!Objects.equals(this.hostObjectId, other.hostObjectId)) {
return false;
}
return true;
}
@Override
public DAOEvent.Type getType() {
return DAOEvent.Type.RESULT;
}
}

View File

@@ -0,0 +1,73 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel.events;
import java.util.Objects;
/**
* An event signaling that a host has been added or removed from the given
* Person.
*/
public class FileSystemPersonEvent implements DAOEvent {
private final Long personObjectId;
/**
* Main constructor.
*
* @param personObjectId May be null for hosts with no associated Person.
*/
public FileSystemPersonEvent(Long personObjectId) {
this.personObjectId = personObjectId;
}
public Long getPersonObjectId() {
return personObjectId;
}
@Override
public int hashCode() {
int hash = 7;
hash = 67 * hash + Objects.hashCode(this.personObjectId);
return hash;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final FileSystemPersonEvent other = (FileSystemPersonEvent) obj;
if (!Objects.equals(this.personObjectId, other.personObjectId)) {
return false;
}
return true;
}
@Override
public DAOEvent.Type getType() {
return DAOEvent.Type.RESULT;
}
}

View File

@@ -0,0 +1,78 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel.events;
import java.util.Objects;
/**
* An event to signal that files have been added or removed
* with the given extension on the given data source.
*/
public class FileTypeExtensionsEvent implements DAOEvent {
private final String extension;
private final long dataSourceId;
public FileTypeExtensionsEvent(String extension, long dataSourceId) {
this.extension = extension;
this.dataSourceId = dataSourceId;
}
public String getExtension() {
return extension;
}
public long getDataSourceId() {
return dataSourceId;
}
@Override
public int hashCode() {
int hash = 7;
hash = 59 * hash + Objects.hashCode(this.extension);
hash = 59 * hash + (int) (this.dataSourceId ^ (this.dataSourceId >>> 32));
return hash;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final FileTypeExtensionsEvent other = (FileTypeExtensionsEvent) obj;
if (this.dataSourceId != other.dataSourceId) {
return false;
}
if (!Objects.equals(this.extension, other.extension)) {
return false;
}
return true;
}
@Override
public Type getType() {
return Type.RESULT;
}
}

View File

@@ -0,0 +1,77 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel.events;
import java.util.Objects;
/**
* An event pertaining to the MIME types view in the DAO.
*/
public class FileTypeMimeEvent implements DAOEvent {
private final String mimeType;
private final long dataSourceId;
public FileTypeMimeEvent(String mimeType, long dataSourceId) {
this.mimeType = mimeType;
this.dataSourceId = dataSourceId;
}
public String getMimeType() {
return mimeType;
}
public long getDataSourceId() {
return dataSourceId;
}
@Override
public int hashCode() {
int hash = 7;
hash = 29 * hash + Objects.hashCode(this.mimeType);
hash = 29 * hash + Objects.hashCode(this.dataSourceId);
return hash;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final FileTypeMimeEvent other = (FileTypeMimeEvent) obj;
if (!Objects.equals(this.mimeType, other.mimeType)) {
return false;
}
if (!Objects.equals(this.dataSourceId, other.dataSourceId)) {
return false;
}
return true;
}
@Override
public Type getType() {
return Type.RESULT;
}
}

View File

@@ -0,0 +1,79 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel.events;
import java.util.Objects;
import org.sleuthkit.autopsy.mainui.datamodel.FileSizeFilter;
/**
* An event to signal that files have been added or removed
* within the given size range on the given data source.
*/
public class FileTypeSizeEvent implements DAOEvent {
private final FileSizeFilter sizeFilter;
private final Long dataSourceId;
public FileTypeSizeEvent(FileSizeFilter sizeFilter, Long dataSourceId) {
this.sizeFilter = sizeFilter;
this.dataSourceId = dataSourceId;
}
public FileSizeFilter getSizeFilter() {
return sizeFilter;
}
public Long getDataSourceId() {
return dataSourceId;
}
@Override
public int hashCode() {
int hash = 7;
hash = 53 * hash + Objects.hashCode(this.sizeFilter);
hash = 53 * hash + Objects.hashCode(this.dataSourceId);
return hash;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final FileTypeSizeEvent other = (FileTypeSizeEvent) obj;
if (this.sizeFilter != other.sizeFilter) {
return false;
}
if (!Objects.equals(this.dataSourceId, other.dataSourceId)) {
return false;
}
return true;
}
@Override
public Type getType() {
return Type.RESULT;
}
}

View File

@@ -0,0 +1,45 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel.events;
import org.sleuthkit.datamodel.BlackboardArtifact;
/**
* An event signaling that keyword hit artifacts for a particular search term
* and set were added or changed, possibly for a particular data source.
*/
public class KeywordHitEvent extends AnalysisResultSetEvent {
private final String regex;
private final String match;
public KeywordHitEvent(String regex, String match, String setName, BlackboardArtifact.Type artifactType, long dataSourceId) {
super(setName, artifactType, dataSourceId);
this.regex = regex;
this.match = match;
}
public String getRegex() {
return regex;
}
public String getMatch() {
return match;
}
}

View File

@@ -0,0 +1,30 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel.events;
/**
* An event signaling that OS accounts were changed.
*/
public class OsAccountEvent implements DAOEvent {
@Override
public Type getType() {
return Type.RESULT;
}
}

View File

@@ -0,0 +1,93 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel.events;
import java.util.Objects;
import org.sleuthkit.autopsy.mainui.datamodel.TagsSearchParams.TagType;
/**
* An event to signal that tags of the given type and tag name have been added
* or removed on the given data source.
*/
public class TagsEvent implements DAOEvent {
private final TagType type;
private final Long tagNameId;
private final Long dataSourceId;
public TagsEvent(TagType type, Long tagNameId, Long dataSourceId) {
this.type = type;
this.tagNameId = tagNameId;
this.dataSourceId = dataSourceId;
}
public TagType getTagType() {
return type;
}
public Long getTagNameId() {
return tagNameId;
}
/**
* @return The data source object id for the tag, or null if it cannot be
*         determined.
*/
public Long getDataSourceId() {
return dataSourceId;
}
@Override
public int hashCode() {
int hash = 7;
hash = 97 * hash + Objects.hashCode(this.type);
hash = 97 * hash + Objects.hashCode(this.tagNameId);
hash = 97 * hash + Objects.hashCode(this.dataSourceId);
return hash;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final TagsEvent other = (TagsEvent) obj;
if (this.type != other.type) {
return false;
}
if (!Objects.equals(this.tagNameId, other.tagNameId)) {
return false;
}
if (!Objects.equals(this.dataSourceId, other.dataSourceId)) {
return false;
}
return true;
}
@Override
public Type getType() {
return Type.RESULT;
}
}

View File

@@ -0,0 +1,149 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel.events;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
/**
* This class is in charge of tracking tree events. When an Autopsy event comes
* in that affects a tree node, the sub-DAOs enqueue the event in this class
* along with its timeout (current time + timeoutMillis). If another Autopsy
* event comes in affecting the same tree category, the timeout is reset. Events
* are not removed from tracking until getEventTimeouts or flushEvents is
* called. The MainDAO has a periodically running task that checks whether any
* tree events have timed out and broadcasts those that have.
*/
public class TreeCounts<T> {
private static final long DEFAULT_TIMEOUT_MILLIS = 2 * 60 * 1000;
private final Object timeoutLock = new Object();
private final Map<T, Long> eventTimeouts = new HashMap<>();
private final long timeoutMillis;
/**
* Constructor that uses default timeout duration.
*/
public TreeCounts() {
this(DEFAULT_TIMEOUT_MILLIS);
}
/**
* Main constructor.
*
* @param timeoutMillis How long to track an event before it reaches a
* timeout (in milliseconds).
*/
public TreeCounts(long timeoutMillis) {
this.timeoutMillis = timeoutMillis;
}
/**
* Returns the current time in milliseconds.
*
* @return The current time in milliseconds.
*/
private long getCurTime() {
return System.currentTimeMillis();
}
/**
* Returns the timeout time based on the current time.
*
* @return The timeout time (the current time plus the timeout duration).
*/
private long getTimeoutTime() {
return getCurTime() + timeoutMillis;
}
/**
* Adds events to be tracked until they reach timeout.
*
* @param events The events to be tracked.
*
* @return The subset of events that were not already being tracked.
*/
public Collection<T> enqueueAll(Collection<T> events) {
Collection<T> updateToIndeterminate = new ArrayList<>();
synchronized (this.timeoutLock) {
for (T event : events) {
this.eventTimeouts.compute(event, (k, v) -> {
if (v == null) {
updateToIndeterminate.add(event);
}
return getTimeoutTime();
});
}
}
return updateToIndeterminate;
}
/**
* Returns the set of events that are currently being tracked for timeout.
*
* @return The events that are being tracked for timeout.
*/
public Set<T> getEnqueued() {
// Take the snapshot under the lock so concurrent enqueues cannot corrupt it.
synchronized (this.timeoutLock) {
return new HashSet<>(eventTimeouts.keySet());
}
}
/**
* Returns the events that have reached timeout based on the current time
* stamp and removes them from tracking.
*
* @return The events that have reached their timeout.
*/
public Collection<T> getEventTimeouts() {
long curTime = getCurTime();
List<T> toUpdate;
synchronized (this.timeoutLock) {
toUpdate = this.eventTimeouts.entrySet().stream()
.filter(e -> e.getValue() < curTime)
.map(e -> e.getKey())
.collect(Collectors.toList());
this.eventTimeouts.keySet().removeAll(toUpdate);
}
return toUpdate;
}
/**
* Returns all currently tracked events regardless of timeout. This method removes
* all events from tracking.
*
* @return All currently tracked events.
*/
public Collection<T> flushEvents() {
synchronized (this.timeoutLock) {
List<T> toRet = new ArrayList<>(eventTimeouts.keySet());
eventTimeouts.clear();
return toRet;
}
}
}
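A minimal usage sketch of the timeout tracking (the 100 ms window and String events are illustrative assumptions, not part of this commit):

import java.util.Arrays;

public class TreeCountsSketch {
    public static void main(String[] args) throws InterruptedException {
        TreeCounts<String> counts = new TreeCounts<>(100); // 100 ms timeout
        // Both events are new, so both are returned as newly tracked.
        System.out.println(counts.enqueueAll(Arrays.asList("evtA", "evtB")));
        Thread.sleep(200); // let both events pass their timeout
        // Both events have timed out; they are returned and removed from tracking.
        System.out.println(counts.getEventTimeouts());
    }
}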

View File

@@ -0,0 +1,85 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.datamodel.events;
import java.util.Objects;
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO;
/**
* An event to signal that an item in the tree has been
* added or changed.
*/
public class TreeEvent implements DAOEvent {
private final TreeItemDTO<?> itemRecord; // the updated item
private final boolean refreshRequired; // true if tree should request new data from DAO
/**
* @param itemRecord The updated item
* @param refreshRequired True if the tree should go to the DAO for updated data
*/
public TreeEvent(TreeItemDTO<?> itemRecord, boolean refreshRequired) {
this.itemRecord = itemRecord;
this.refreshRequired = refreshRequired;
}
public TreeItemDTO<?> getItemRecord() {
return itemRecord;
}
public boolean isRefreshRequired() {
return refreshRequired;
}
@Override
public int hashCode() {
int hash = 7;
hash = 89 * hash + Objects.hashCode(this.itemRecord);
hash = 89 * hash + (this.refreshRequired ? 1 : 0);
return hash;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final TreeEvent other = (TreeEvent) obj;
if (this.refreshRequired != other.refreshRequired) {
return false;
}
if (!Objects.equals(this.itemRecord, other.itemRecord)) {
return false;
}
return true;
}
@Override
public Type getType() {
return Type.TREE;
}
}

View File

@@ -85,8 +85,7 @@ public class AnalysisResultNode extends ArtifactNode<AnalysisResult, AnalysisRes
@Override
public Optional<AbstractFile> getExtractArchiveWithPasswordActionFile() {
Optional<Content> optionalSourceContent = getSourceContent();
// GVDTODO: HANDLE THIS ACTION IN A BETTER WAY!-----
// See JIRA-8099
// TODO: See JIRA-8099
boolean encryptionDetected = false;
if(optionalSourceContent.isPresent()) {
if (optionalSourceContent.get() instanceof AbstractFile) {

View File

@@ -21,27 +21,22 @@ package org.sleuthkit.autopsy.mainui.nodes;
import org.sleuthkit.autopsy.mainui.datamodel.KeywordSearchTermParams;
import org.sleuthkit.autopsy.mainui.datamodel.KeywordMatchParams;
import com.google.common.collect.ImmutableSet;
import java.beans.PropertyChangeEvent;
import java.util.Comparator;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import org.openide.nodes.ChildFactory;
import org.openide.nodes.Children;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent;
import org.sleuthkit.autopsy.datamodel.utils.IconsUtil;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultDAO;
import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSearchParam;
import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSetSearchParam;
import org.sleuthkit.autopsy.mainui.datamodel.KeywordHitSearchParam;
import org.sleuthkit.autopsy.mainui.datamodel.MainDAO;
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO;
import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent;
import static org.sleuthkit.autopsy.mainui.nodes.TreeNode.getDefaultLookup;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.Category;
import org.sleuthkit.datamodel.TskData;
/**
@@ -49,6 +44,9 @@ import org.sleuthkit.datamodel.TskData;
*/
public class AnalysisResultTypeFactory extends TreeChildFactory<AnalysisResultSearchParam> {
private final static Comparator<String> STRING_COMPARATOR = Comparator.nullsFirst(Comparator.naturalOrder());
@SuppressWarnings("deprecation")
private static Set<Integer> SET_TREE_ARTIFACTS = ImmutableSet.of(
BlackboardArtifact.Type.TSK_HASHSET_HIT.getTypeID(),
BlackboardArtifact.Type.TSK_INTERESTING_ARTIFACT_HIT.getTypeID(),
@@ -85,9 +83,9 @@ public class AnalysisResultTypeFactory extends TreeChildFactory<AnalysisResultSe
@Override
protected TreeNode<AnalysisResultSearchParam> createNewNode(TreeResultsDTO.TreeItemDTO<? extends AnalysisResultSearchParam> rowData) {
if (SET_TREE_ARTIFACTS.contains(rowData.getTypeData().getArtifactType().getTypeID())) {
return new TreeTypeNode(rowData, new TreeSetFactory(rowData.getTypeData().getArtifactType(), dataSourceId, null));
} else if (BlackboardArtifact.Type.TSK_KEYWORD_HIT.equals(rowData.getTypeData().getArtifactType())) {
if (SET_TREE_ARTIFACTS.contains(rowData.getSearchParams().getArtifactType().getTypeID())) {
return new TreeTypeNode(rowData, new TreeSetFactory(rowData.getSearchParams().getArtifactType(), dataSourceId, null));
} else if (BlackboardArtifact.Type.TSK_KEYWORD_HIT.equals(rowData.getSearchParams().getArtifactType())) {
return new TreeTypeNode(rowData, new KeywordSetFactory(dataSourceId));
} else {
return new AnalysisResultTypeTreeNode(rowData);
@@ -95,71 +93,14 @@ public class AnalysisResultTypeFactory extends TreeChildFactory<AnalysisResultSe
}
@Override
public boolean isRefreshRequired(PropertyChangeEvent evt) {
String eventType = evt.getPropertyName();
if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) {
/**
* This is a stop gap measure until a different way of handling the
* closing of cases is worked out. Currently, remote events may be
* received for a case that is already closed.
*/
try {
Case.getCurrentCaseThrows();
/**
* Due to some unresolved issues with how cases are closed, it
* is possible for the event to have a null oldValue if the
* event is a remote event.
*/
final ModuleDataEvent event = (ModuleDataEvent) evt.getOldValue();
if (null != event && Category.ANALYSIS_RESULT.equals(event.getBlackboardArtifactType().getCategory())
&& !(AnalysisResultDAO.getIgnoredTreeTypes().contains(event.getBlackboardArtifactType()))) {
return true;
}
} catch (NoCurrentCaseException notUsed) {
/**
* Case is closed, do nothing.
*/
}
}
return false;
protected TreeResultsDTO.TreeItemDTO<? extends AnalysisResultSearchParam> getOrCreateRelevantChild(TreeEvent daoEvt) {
// GVDTODO
return null;
}
/**
* See if expected blackboard type matches event.
*
* @param expectedType The expected artifact type.
* @param evt The event.
*
* @return If the event is a data added event and contains the provided
* type.
*/
private static boolean isRefreshRequired(BlackboardArtifact.Type expectedType, PropertyChangeEvent evt) {
String eventType = evt.getPropertyName();
if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) {
/**
* This is a stop gap measure until a different way of handling the
* closing of cases is worked out. Currently, remote events may be
* received for a case that is already closed.
*/
try {
Case.getCurrentCaseThrows();
/**
* Due to some unresolved issues with how cases are closed, it
* is possible for the event to have a null oldValue if the
* event is a remote event.
*/
final ModuleDataEvent event = (ModuleDataEvent) evt.getOldValue();
// GVDTODO it may be necessary to have more fine-grained check for refresh here.
if (null != event && expectedType.equals(event.getBlackboardArtifactType())) {
return true;
}
} catch (NoCurrentCaseException notUsed) {
/**
* Case is closed, do nothing.
*/
}
}
return false;
@Override
public int compare(AnalysisResultSearchParam o1, AnalysisResultSearchParam o2) {
return o1.getArtifactType().getDisplayName().compareTo(o2.getArtifactType().getDisplayName());
}
/**
@@ -173,14 +114,14 @@ public class AnalysisResultTypeFactory extends TreeChildFactory<AnalysisResultSe
* @param itemData The data to display.
*/
public AnalysisResultTypeTreeNode(TreeResultsDTO.TreeItemDTO<? extends AnalysisResultSearchParam> itemData) {
super(itemData.getTypeData().getArtifactType().getTypeName(),
getIconPath(itemData.getTypeData().getArtifactType()),
super(itemData.getSearchParams().getArtifactType().getTypeName(),
getIconPath(itemData.getSearchParams().getArtifactType()),
itemData);
}
@Override
public void respondSelection(DataResultTopComponent dataResultPanel) {
dataResultPanel.displayAnalysisResult(this.getItemData().getTypeData());
dataResultPanel.displayAnalysisResult(this.getItemData().getSearchParams());
}
}
@@ -195,8 +136,8 @@ public class AnalysisResultTypeFactory extends TreeChildFactory<AnalysisResultSe
* @param itemData The data to display.
*/
public TreeTypeNode(TreeResultsDTO.TreeItemDTO<? extends AnalysisResultSearchParam> itemData, ChildFactory<?> childFactory) {
super(itemData.getTypeData().getArtifactType().getTypeName(),
getIconPath(itemData.getTypeData().getArtifactType()),
super(itemData.getSearchParams().getArtifactType().getTypeName(),
getIconPath(itemData.getSearchParams().getArtifactType()),
itemData,
Children.create(childFactory, true),
getDefaultLookup(itemData));
@@ -234,13 +175,19 @@ public class AnalysisResultTypeFactory extends TreeChildFactory<AnalysisResultSe
}
@Override
public boolean isRefreshRequired(PropertyChangeEvent evt) {
return AnalysisResultTypeFactory.isRefreshRequired(artifactType, evt);
protected TreeNode<AnalysisResultSetSearchParam> createNewNode(TreeResultsDTO.TreeItemDTO<? extends AnalysisResultSetSearchParam> rowData) {
return new TreeSetTypeNode(rowData);
}
@Override
protected TreeNode<AnalysisResultSetSearchParam> createNewNode(TreeResultsDTO.TreeItemDTO<? extends AnalysisResultSetSearchParam> rowData) {
return new TreeSetTypeNode(rowData);
protected TreeResultsDTO.TreeItemDTO<? extends AnalysisResultSetSearchParam> getOrCreateRelevantChild(TreeEvent daoEvt) {
// GVDTODO
return null;
}
@Override
public int compare(AnalysisResultSetSearchParam o1, AnalysisResultSetSearchParam o2) {
return STRING_COMPARATOR.compare(o1.getSetName(), o2.getSetName());
}
}
@@ -252,11 +199,11 @@ public class AnalysisResultTypeFactory extends TreeChildFactory<AnalysisResultSe
/**
* Main constructor.
*
* @param itemData The data to display.
* @param itemData The data to display.
*/
public TreeSetTypeNode(TreeResultsDTO.TreeItemDTO<? extends AnalysisResultSetSearchParam> itemData) {
super(itemData.getTypeData().getArtifactType().getTypeName(),
getIconPath(itemData.getTypeData().getArtifactType()),
super(itemData.getSearchParams().getArtifactType().getTypeName(),
getIconPath(itemData.getSearchParams().getArtifactType()),
itemData,
Children.LEAF,
getDefaultLookup(itemData));
@@ -264,7 +211,7 @@ public class AnalysisResultTypeFactory extends TreeChildFactory<AnalysisResultSe
@Override
public void respondSelection(DataResultTopComponent dataResultPanel) {
dataResultPanel.displayAnalysisResultSet(this.getItemData().getTypeData());
dataResultPanel.displayAnalysisResultSet(this.getItemData().getSearchParams());
}
}
@@ -291,15 +238,15 @@ public class AnalysisResultTypeFactory extends TreeChildFactory<AnalysisResultSe
/**
* Main constructor.
*
* @param itemData The data to display.
* @param itemData The data to display.
*/
public KeywordSetNode(TreeResultsDTO.TreeItemDTO<? extends AnalysisResultSetSearchParam> itemData) {
super(itemData.getTypeData().getArtifactType().getTypeName(),
getIconPath(itemData.getTypeData().getArtifactType()),
super(itemData.getSearchParams().getArtifactType().getTypeName(),
getIconPath(itemData.getSearchParams().getArtifactType()),
itemData,
Children.create(new KeywordSearchTermFactory(itemData.getTypeData()), true),
Children.create(new KeywordSearchTermFactory(itemData.getSearchParams()), true),
getDefaultLookup(itemData));
}
}
}
/**
@@ -330,8 +277,14 @@ public class AnalysisResultTypeFactory extends TreeChildFactory<AnalysisResultSe
}
@Override
public boolean isRefreshRequired(PropertyChangeEvent evt) {
return AnalysisResultTypeFactory.isRefreshRequired(BlackboardArtifact.Type.TSK_KEYWORD_HIT, evt);
protected TreeResultsDTO.TreeItemDTO<? extends KeywordSearchTermParams> getOrCreateRelevantChild(TreeEvent daoEvt) {
// GVDTODO
return null;
}
@Override
public int compare(KeywordSearchTermParams o1, KeywordSearchTermParams o2) {
return STRING_COMPARATOR.compare(o1.getSearchTerm(), o2.getSearchTerm());
}
}
@@ -347,19 +300,19 @@ public class AnalysisResultTypeFactory extends TreeChildFactory<AnalysisResultSe
* @param itemData The data for the search term.
*/
public KeywordSearchTermNode(TreeResultsDTO.TreeItemDTO<? extends KeywordSearchTermParams> itemData) {
super(itemData.getTypeData().getSearchTerm(),
super(itemData.getSearchParams().getSearchTerm(),
getIconPath(BlackboardArtifact.Type.TSK_KEYWORD_HIT),
itemData,
(itemData.getTypeData().hasChildren() || itemData.getTypeData().getSearchType() == TskData.KeywordSearchQueryType.REGEX
(itemData.getSearchParams().hasChildren() || itemData.getSearchParams().getSearchType() == TskData.KeywordSearchQueryType.REGEX
// for regex queries always create a subtree, even if there is only one child
? Children.create(new KeywordFoundMatchFactory(itemData.getTypeData()), true)
? Children.create(new KeywordFoundMatchFactory(itemData.getSearchParams()), true)
: Children.LEAF),
getDefaultLookup(itemData));
}
@Override
public void respondSelection(DataResultTopComponent dataResultPanel) {
KeywordSearchTermParams searchTermParams = this.getItemData().getTypeData();
KeywordSearchTermParams searchTermParams = this.getItemData().getSearchParams();
if (!searchTermParams.hasChildren()) {
KeywordHitSearchParam searchParams = new KeywordHitSearchParam(searchTermParams.getDataSourceId(),
@@ -407,8 +360,14 @@ public class AnalysisResultTypeFactory extends TreeChildFactory<AnalysisResultSe
}
@Override
public boolean isRefreshRequired(PropertyChangeEvent evt) {
return AnalysisResultTypeFactory.isRefreshRequired(BlackboardArtifact.Type.TSK_KEYWORD_HIT, evt);
protected TreeResultsDTO.TreeItemDTO<? extends KeywordMatchParams> getOrCreateRelevantChild(TreeEvent daoEvt) {
// GVDTODO
return null;
}
@Override
public int compare(KeywordMatchParams o1, KeywordMatchParams o2) {
return STRING_COMPARATOR.compare(o1.getKeywordMatch(), o2.getKeywordMatch());
}
}
@@ -424,7 +383,7 @@ public class AnalysisResultTypeFactory extends TreeChildFactory<AnalysisResultSe
* @param itemData The data for the match parameters.
*/
public KeywordFoundMatchNode(TreeResultsDTO.TreeItemDTO<? extends KeywordMatchParams> itemData) {
super(itemData.getTypeData().getKeywordMatch(),
super(itemData.getSearchParams().getKeywordMatch(),
getIconPath(BlackboardArtifact.Type.TSK_KEYWORD_HIT),
itemData,
Children.LEAF,
@@ -433,7 +392,7 @@ public class AnalysisResultTypeFactory extends TreeChildFactory<AnalysisResultSe
@Override
public void respondSelection(DataResultTopComponent dataResultPanel) {
KeywordMatchParams searchParams = this.getItemData().getTypeData();
KeywordMatchParams searchParams = this.getItemData().getSearchParams();
dataResultPanel.displayKeywordHits(new KeywordHitSearchParam(
searchParams.getDataSourceId(),
searchParams.getSetName(),

View File

@@ -125,7 +125,7 @@ public abstract class ArtifactNode<T extends BlackboardArtifact, R extends Artif
}
@Override
public boolean supportsExtractActions() {
public boolean supportsTableExtractActions() {
return rowData.getSrcContent() instanceof AbstractFile;
}

View File

@@ -138,7 +138,7 @@ public final class BlackboardArtifactTagNode extends BaseNode<SearchResultsDTO,
}
@Override
public boolean supportsExtractActions() {
public boolean supportsTableExtractActions() {
return true;
}

View File

@@ -1,4 +1,7 @@
AnalysisResultTypeFactory_adHocName=Adhoc Results
DataArtifactTypeFactory_AccountTypeParentNode_displayName=Communication Accounts
FileSystemFactory.FileSystemTreeNode.ExtractUnallocAction.text=Extract Unallocated Space to Single Files
FileSystemFactory.UnsupportedTreeNode.displayName=Unsupported Content
ImageNode_ExtractUnallocAction_text=Extract Unallocated Space to Single Files
SearchResultRootNode_createSheet_childCount_displayName=Child Count
SearchResultRootNode_createSheet_childCount_name=Child Count

View File

@@ -119,7 +119,7 @@ public final class ContentTagNode extends BaseNode<SearchResultsDTO, ContentTags
}
@Override
public boolean supportsExtractActions() {
public boolean supportsTableExtractActions() {
return true;
}

View File

@@ -20,11 +20,8 @@ package org.sleuthkit.autopsy.mainui.nodes;
import java.beans.PropertyChangeEvent;
import java.util.concurrent.ExecutionException;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.ingest.ModuleContentEvent;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent;
import org.sleuthkit.autopsy.mainui.datamodel.SearchResultsDTO;
import org.sleuthkit.datamodel.Content;
/**
* Provides a generic interface to perform searches and determine if refreshes
@@ -58,13 +55,12 @@ public abstract class DAOFetcher<P> {
*
* @param pageSize The number of items per page.
* @param pageIdx The page index.
* @param hardRefresh Whether or not to perform a hard refresh.
*
* @return The retrieved data.
*
* @throws ExecutionException
*/
public abstract SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException;
public abstract SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException;
/**
* Returns true if the ingest module event will require a refresh in the
@@ -74,49 +70,5 @@ public abstract class DAOFetcher<P> {
*
* @return True if the event will require a refresh.
*/
public abstract boolean isRefreshRequired(PropertyChangeEvent evt);
/**
* Returns the content from the ModuleContentEvent. If the event does not
* contain a ModuleContentEvent or the event does not contain Content, null
* is returned.
*
* @param evt The event
*
* @return The inner content or null if no content.
*/
protected Content getContentFromEvt(PropertyChangeEvent evt) {
String eventName = evt.getPropertyName();
if (IngestManager.IngestModuleEvent.CONTENT_CHANGED.toString().equals(eventName)
&& (evt.getOldValue() instanceof ModuleContentEvent)
&& ((ModuleContentEvent) evt.getOldValue()).getSource() instanceof Content) {
return (Content) ((ModuleContentEvent) evt.getOldValue()).getSource();
} else if (IngestManager.IngestModuleEvent.FILE_DONE.toString().equals(eventName)
&& (evt.getNewValue() instanceof Content)) {
return (Content) evt.getNewValue();
} else {
return null;
}
}
/**
* Returns the ModuleDataEvent in the event if there is a child
* ModuleDataEvent. If not, null is returned.
*
* @param evt The event.
*
* @return The inner ModuleDataEvent or null.
*/
protected ModuleDataEvent getModuleDataFromEvt(PropertyChangeEvent evt) {
String eventName = evt.getPropertyName();
if (IngestManager.IngestModuleEvent.DATA_ADDED.toString().equals(eventName)
&& (evt.getOldValue() instanceof ModuleDataEvent)) {
return (ModuleDataEvent) evt.getOldValue();
} else {
return null;
}
}
public abstract boolean isRefreshRequired(DAOEvent evt);
}

View File

@@ -18,26 +18,28 @@
*/
package org.sleuthkit.autopsy.mainui.nodes;
import java.beans.PropertyChangeEvent;
import java.util.Objects;
import java.util.concurrent.ExecutionException;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.openide.nodes.Children;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.mainui.datamodel.CommAccountsSearchParams;
import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent;
import org.sleuthkit.autopsy.datamodel.accounts.Accounts;
import org.sleuthkit.autopsy.datamodel.utils.IconsUtil;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactDAO;
import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactSearchParam;
import org.sleuthkit.autopsy.mainui.datamodel.MainDAO;
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO;
import static org.sleuthkit.autopsy.mainui.nodes.TreeNode.getDefaultLookup;
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO;
import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.Category;
/**
* Factory for displaying data artifact types in the tree.
*/
public class DataArtifactTypeFactory extends TreeChildFactory<DataArtifactSearchParam> {
private final Long dataSourceId;
/**
@@ -49,65 +51,185 @@ public class DataArtifactTypeFactory extends TreeChildFactory<DataArtifactSearch
this.dataSourceId = dataSourceId;
}
@Override
protected TreeResultsDTO<? extends DataArtifactSearchParam> getChildResults() throws IllegalArgumentException, ExecutionException {
return MainDAO.getInstance().getDataArtifactsDAO().getDataArtifactCounts(dataSourceId);
}
@Override
protected TreeNode<DataArtifactSearchParam> createNewNode(TreeResultsDTO.TreeItemDTO<? extends DataArtifactSearchParam> rowData) {
return new DataArtifactTypeTreeNode(rowData);
}
@Override
public boolean isRefreshRequired(PropertyChangeEvent evt) {
String eventType = evt.getPropertyName();
if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) {
/**
* This is a stop gap measure until a different way of handling the
* closing of cases is worked out. Currently, remote events may be
* received for a case that is already closed.
*/
try {
Case.getCurrentCaseThrows();
/**
* Due to some unresolved issues with how cases are closed, it
* is possible for the event to have a null oldValue if the
* event is a remote event.
*/
final ModuleDataEvent event = (ModuleDataEvent) evt.getOldValue();
if (null != event && Category.DATA_ARTIFACT.equals(event.getBlackboardArtifactType().getCategory())
&& !(DataArtifactDAO.getIgnoredTreeTypes().contains(event.getBlackboardArtifactType()))) {
return true;
}
} catch (NoCurrentCaseException notUsed) {
/**
* Case is closed, do nothing.
*/
}
if (rowData.getSearchParams().getArtifactType().getTypeID() == BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID()) {
return new AccountTypeParentNode(rowData, this.dataSourceId);
} else {
return new DataArtifactTypeTreeNode(rowData);
}
return false;
}
@Override
protected TreeItemDTO<DataArtifactSearchParam> getOrCreateRelevantChild(TreeEvent treeEvt) {
TreeItemDTO<DataArtifactSearchParam> originalTreeItem = super.getTypedTreeItem(treeEvt, DataArtifactSearchParam.class);
if (originalTreeItem != null
&& !DataArtifactDAO.getIgnoredTreeTypes().contains(originalTreeItem.getSearchParams().getArtifactType())
&& (this.dataSourceId == null || Objects.equals(this.dataSourceId, originalTreeItem.getSearchParams().getDataSourceId()))) {
DataArtifactSearchParam searchParam = originalTreeItem.getSearchParams();
return new TreeItemDTO<>(
BlackboardArtifact.Category.DATA_ARTIFACT.name(),
new DataArtifactSearchParam(searchParam.getArtifactType(), this.dataSourceId),
searchParam.getArtifactType().getTypeID(),
MainDAO.getInstance().getDataArtifactsDAO().getDisplayName(searchParam.getArtifactType()),
originalTreeItem.getDisplayCount());
}
return null;
}
@Override
public int compare(DataArtifactSearchParam o1, DataArtifactSearchParam o2) {
DataArtifactDAO dao = MainDAO.getInstance().getDataArtifactsDAO();
return dao.getDisplayName(o1.getArtifactType()).compareToIgnoreCase(dao.getDisplayName(o2.getArtifactType()));
}
private static String getIconPath(BlackboardArtifact.Type artType) {
String iconPath = IconsUtil.getIconFilePath(artType.getTypeID());
return iconPath != null && iconPath.charAt(0) == '/' ? iconPath.substring(1) : iconPath;
}
/**
* Display name and count of a data artifact type in the tree.
*/
public static class DataArtifactTypeTreeNode extends TreeNode<DataArtifactSearchParam> {
private static String getIconPath(BlackboardArtifact.Type artType) {
String iconPath = IconsUtil.getIconFilePath(artType.getTypeID());
return iconPath != null && iconPath.charAt(0) == '/' ? iconPath.substring(1) : iconPath;
}
public DataArtifactTypeTreeNode(TreeResultsDTO.TreeItemDTO<? extends DataArtifactSearchParam> itemData) {
super(itemData.getTypeData().getArtifactType().getTypeName(),
getIconPath(itemData.getTypeData().getArtifactType()),
super(itemData.getSearchParams().getArtifactType().getTypeName(),
getIconPath(itemData.getSearchParams().getArtifactType()),
itemData);
}
@Override
public void respondSelection(DataResultTopComponent dataResultPanel) {
dataResultPanel.displayDataArtifact(this.getItemData().getTypeData());
dataResultPanel.displayDataArtifact(this.getItemData().getSearchParams());
}
}
/**
* The account node that has nested children of account types.
*/
@Messages({
"DataArtifactTypeFactory_AccountTypeParentNode_displayName=Communcation Accounts"
})
static class AccountTypeParentNode extends TreeNode<DataArtifactSearchParam> {
/**
* Sets correct title (not using artifact type display name).
*
* @param itemData The item data.
*
* @return The updated data.
*/
private static TreeItemDTO<? extends DataArtifactSearchParam> createTitledData(TreeResultsDTO.TreeItemDTO<? extends DataArtifactSearchParam> itemData) {
return new TreeItemDTO<>(
itemData.getTypeId(),
itemData.getSearchParams(),
itemData.getId(),
Bundle.DataArtifactTypeFactory_AccountTypeParentNode_displayName(),
itemData.getDisplayCount()
);
}
/**
* Main constructor.
*
* @param itemData The data to display.
* @param dataSourceId The data source id to filter on or null if no
* data source filter.
*/
public AccountTypeParentNode(TreeResultsDTO.TreeItemDTO<? extends DataArtifactSearchParam> itemData, Long dataSourceId) {
super(itemData.getSearchParams().getArtifactType().getTypeName(),
getIconPath(itemData.getSearchParams().getArtifactType()),
createTitledData(itemData),
Children.create(new AccountTypeFactory(dataSourceId), true),
getDefaultLookup(itemData)
);
}
@Override
protected void updateDisplayName(TreeItemDTO<? extends DataArtifactSearchParam> prevData, TreeItemDTO<? extends DataArtifactSearchParam> curData) {
super.updateDisplayName(prevData, createTitledData(curData));
}
}
/**
* Factory for displaying account types.
*/
static class AccountTypeFactory extends TreeChildFactory<CommAccountsSearchParams> {
private final Long dataSourceId;
/**
* Main constructor.
*
* @param dataSourceId The data source object id for which the results
* should be filtered or null if no data source
* filtering.
*/
public AccountTypeFactory(Long dataSourceId) {
this.dataSourceId = dataSourceId;
}
@Override
protected TreeResultsDTO<? extends CommAccountsSearchParams> getChildResults() throws IllegalArgumentException, ExecutionException {
return MainDAO.getInstance().getCommAccountsDAO().getAccountsCounts(this.dataSourceId);
}
@Override
protected TreeNode<CommAccountsSearchParams> createNewNode(TreeResultsDTO.TreeItemDTO<? extends CommAccountsSearchParams> rowData) {
return new AccountTypeNode(rowData);
}
@Override
protected TreeItemDTO<? extends CommAccountsSearchParams> getOrCreateRelevantChild(TreeEvent treeEvt) {
TreeItemDTO<CommAccountsSearchParams> originalTreeItem = getTypedTreeItem(treeEvt, CommAccountsSearchParams.class);
if (originalTreeItem != null
&& (this.dataSourceId == null || Objects.equals(this.dataSourceId, originalTreeItem.getSearchParams().getDataSourceId()))) {
CommAccountsSearchParams searchParam = originalTreeItem.getSearchParams();
return TreeChildFactory.createTreeItemDTO(originalTreeItem,
new CommAccountsSearchParams(searchParam.getType(), this.dataSourceId));
}
return null;
}
@Override
public int compare(CommAccountsSearchParams o1, CommAccountsSearchParams o2) {
return o1.getType().getDisplayName().compareToIgnoreCase(o2.getType().getDisplayName());
}
}
/**
* A node representing a single account type in the tree.
*/
static class AccountTypeNode extends TreeNode<CommAccountsSearchParams> {
/**
* Main constructor.
*
* @param itemData The data to display.
*/
public AccountTypeNode(TreeResultsDTO.TreeItemDTO<? extends CommAccountsSearchParams> itemData) {
super(itemData.getSearchParams().getType().getTypeName(),
Accounts.getIconFilePath(itemData.getSearchParams().getType()),
itemData);
}
@Override
public void respondSelection(DataResultTopComponent dataResultPanel) {
dataResultPanel.displayAccounts(super.getItemData().getSearchParams());
}
}
}

View File

@@ -39,6 +39,7 @@ public class DirectoryNode extends BaseNode<SearchResultsDTO, DirectoryRowDTO> {
*/
public DirectoryNode(SearchResultsDTO results, DirectoryRowDTO row) {
super(Children.LEAF, ContentNodeUtil.getLookup(row.getContent()), results, row);
setName(ContentNodeUtil.getContentName(row.getContent().getId()));
setDisplayName(row.getContent().getName());
setShortDescription(row.getContent().getName());
setIcon();
@@ -52,9 +53,9 @@ public class DirectoryNode extends BaseNode<SearchResultsDTO, DirectoryRowDTO> {
private void setIcon() {
// set name, display name, and icon
if (getRowDTO().getContent().isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC)) {
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/folder-icon-deleted.png"); //NON-NLS
this.setIconBaseWithExtension(NodeIconUtil.DELETED_FOLDER.getPath()); //NON-NLS
} else {
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/Folder-icon.png"); //NON-NLS
this.setIconBaseWithExtension(NodeIconUtil.FOLDER.getPath()); //NON-NLS
}
}
@@ -69,7 +70,7 @@ public class DirectoryNode extends BaseNode<SearchResultsDTO, DirectoryRowDTO> {
}
@Override
public boolean supportsExtractActions() {
public boolean supportsTableExtractActions() {
return true;
}

View File

@@ -29,7 +29,7 @@ import org.sleuthkit.autopsy.datamodel.FileTypeExtensions;
import org.sleuthkit.autopsy.mainui.datamodel.SearchResultsDTO;
import org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO;
import org.sleuthkit.autopsy.mainui.datamodel.ColumnKey;
import org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO.ExtensionMediaType;
import org.sleuthkit.autopsy.mainui.datamodel.MediaTypeUtils;
import org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO.LayoutFileRowDTO;
import org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO.SlackFileRowDTO;
import org.sleuthkit.autopsy.mainui.nodes.actions.ActionContext;
@@ -47,44 +47,6 @@ import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM;
*/
public class FileNode extends AbstractNode implements ActionContext {
/**
* Gets the path to the icon file that should be used to visually represent
* an AbstractFile, using the file name extension to select the icon.
*
* @param file An AbstractFile.
*
* @return An icon file path.
*/
static String getIconForFileType(ExtensionMediaType fileType) {
if (fileType == null) {
return "org/sleuthkit/autopsy/images/file-icon.png";
}
switch (fileType) {
case IMAGE:
return "org/sleuthkit/autopsy/images/image-file.png";
case VIDEO:
return "org/sleuthkit/autopsy/images/video-file.png";
case AUDIO:
return "org/sleuthkit/autopsy/images/audio-file.png";
case DOC:
return "org/sleuthkit/autopsy/images/doc-file.png";
case EXECUTABLE:
return "org/sleuthkit/autopsy/images/exe-file.png";
case TEXT:
return "org/sleuthkit/autopsy/images/text-file.png";
case WEB:
return "org/sleuthkit/autopsy/images/web-file.png";
case PDF:
return "org/sleuthkit/autopsy/images/pdf-file.png";
case ARCHIVE:
return "org/sleuthkit/autopsy/images/archive-file.png";
default:
case UNCATEGORIZED:
return "org/sleuthkit/autopsy/images/file-icon.png";
}
}
private final boolean directoryBrowseMode;
private final FileRowDTO fileData;
private final List<ColumnKey> columns;
@ -97,8 +59,8 @@ public class FileNode extends AbstractNode implements ActionContext {
// GVDTODO: at some point, this leaf will need to allow for children
super(Children.LEAF, ContentNodeUtil.getLookup(file.getAbstractFile()));
setIcon(file);
setDisplayName(ContentNodeUtil.getContentDisplayName(file.getFileName()));
setName(ContentNodeUtil.getContentName(file.getId()));
setDisplayName(ContentNodeUtil.getContentDisplayName(file.getFileName()));
setShortDescription(ContentNodeUtil.getContentDisplayName(file.getFileName()));
this.directoryBrowseMode = directoryBrowseMode;
this.fileData = file;
@ -116,7 +78,7 @@ public class FileNode extends AbstractNode implements ActionContext {
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/file-icon-deleted.png"); //NON-NLS
}
} else {
this.setIconBaseWithExtension(getIconForFileType(fileData.getExtensionMediaType()));
this.setIconBaseWithExtension(MediaTypeUtils.getIconForFileType(fileData.getExtensionMediaType()));
}
}
@ -146,7 +108,7 @@ public class FileNode extends AbstractNode implements ActionContext {
}
@Override
public boolean supportsExtractActions() {
public boolean supportsTableExtractActions() {
return true;
}
@ -166,8 +128,7 @@ public class FileNode extends AbstractNode implements ActionContext {
@Override
public Optional<AbstractFile> getExtractArchiveWithPasswordActionFile() {
// GVDTODO: HANDLE THIS ACTION IN A BETTER WAY!-----
// See JIRA-8099
// TODO: See JIRA-8099
AbstractFile file = this.fileData.getAbstractFile();
boolean isArchive = FileTypeExtensions.getArchiveExtensions().contains("." + file.getNameExtension().toLowerCase());
boolean encryptionDetected = false;
@ -207,17 +168,17 @@ public class FileNode extends AbstractNode implements ActionContext {
LayoutFile lf = ((LayoutFileRowDTO) fileData).getLayoutFile();
switch (lf.getType()) {
case CARVED:
setIconBaseWithExtension("org/sleuthkit/autopsy/images/carved-file-x-icon-16.png");
setIconBaseWithExtension(NodeIconUtil.CARVED_FILE.getPath());
break;
case LAYOUT_FILE:
if (lf.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC)) {
setIconBaseWithExtension("org/sleuthkit/autopsy/images/file-icon-deleted.png");
setIconBaseWithExtension(NodeIconUtil.DELETED_FILE.getPath());
} else {
setIconBaseWithExtension(getIconForFileType(layoutFileRow.getExtensionMediaType()));
setIconBaseWithExtension(MediaTypeUtils.getIconForFileType(layoutFileRow.getExtensionMediaType()));
}
break;
default:
setIconBaseWithExtension("org/sleuthkit/autopsy/images/file-icon-deleted.png");
setIconBaseWithExtension(NodeIconUtil.DELETED_FILE.getPath());
}
}
}
@ -236,12 +197,12 @@ public class FileNode extends AbstractNode implements ActionContext {
AbstractFile file = fileData.getAbstractFile();
if (file.isDirNameFlagSet(TSK_FS_NAME_FLAG_ENUM.UNALLOC)) {
if (file.getType().equals(TSK_DB_FILES_TYPE_ENUM.CARVED)) {
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/carved-file-x-icon-16.png"); //NON-NLS
this.setIconBaseWithExtension(NodeIconUtil.CARVED_FILE.getPath()); //NON-NLS
} else {
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/file-icon-deleted.png"); //NON-NLS
this.setIconBaseWithExtension(NodeIconUtil.DELETED_FILE.getPath()); //NON-NLS
}
} else {
this.setIconBaseWithExtension(getIconForFileType(fileData.getExtensionMediaType()));
this.setIconBaseWithExtension(MediaTypeUtils.getIconForFileType(fileData.getExtensionMediaType()));
}
}
}
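The extension-to-icon lookup removed above now lives in MediaTypeUtils, per the new import and the MediaTypeUtils.getIconForFileType(...) call sites in this file. A minimal sketch of the relocated helper, assuming it keeps the deleted mapping unchanged (abbreviated; the remaining cases mirror the switch removed above):

public static String getIconForFileType(ExtensionMediaType fileType) {
    // an unknown or missing extension falls back to the generic file icon
    if (fileType == null) {
        return "org/sleuthkit/autopsy/images/file-icon.png";
    }
    switch (fileType) {
        case IMAGE:
            return "org/sleuthkit/autopsy/images/image-file.png";
        case VIDEO:
            return "org/sleuthkit/autopsy/images/video-file.png";
        case AUDIO:
            return "org/sleuthkit/autopsy/images/audio-file.png";
        // ... DOC, EXECUTABLE, TEXT, WEB, PDF, and ARCHIVE as in the removed block
        default:
            return "org/sleuthkit/autopsy/images/file-icon.png";
    }
}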

View File

@ -0,0 +1,563 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.mainui.nodes;
import java.util.Optional;
import org.openide.nodes.Children;
import org.openide.nodes.Node;
import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
import javax.swing.Action;
import org.openide.util.Lookup;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.datamodel.FileTypeExtensions;
import org.sleuthkit.autopsy.directorytree.ExtractUnallocAction;
import org.sleuthkit.autopsy.directorytree.FileSystemDetailsAction;
import org.sleuthkit.autopsy.mainui.datamodel.FileSystemContentSearchParam;
import org.sleuthkit.autopsy.mainui.datamodel.FileSystemColumnUtils;
import org.sleuthkit.autopsy.mainui.datamodel.MediaTypeUtils;
import org.sleuthkit.autopsy.mainui.datamodel.MainDAO;
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO;
import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent;
import static org.sleuthkit.autopsy.mainui.nodes.NodeIconUtil.CARVED_FILE;
import static org.sleuthkit.autopsy.mainui.nodes.NodeIconUtil.DELETED_FILE;
import static org.sleuthkit.autopsy.mainui.nodes.NodeIconUtil.DELETED_FOLDER;
import static org.sleuthkit.autopsy.mainui.nodes.NodeIconUtil.FOLDER;
import static org.sleuthkit.autopsy.mainui.nodes.TreeNode.getDefaultLookup;
import org.sleuthkit.autopsy.mainui.nodes.actions.ActionContext;
import org.sleuthkit.autopsy.mainui.nodes.actions.ActionsFactory;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.Host;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.LocalDirectory;
import org.sleuthkit.datamodel.LocalFilesDataSource;
import org.sleuthkit.datamodel.Pool;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskDataException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.datamodel.VirtualDirectory;
import org.sleuthkit.datamodel.Volume;
/**
* Factory for displaying content in the data source section of the tree.
*/
public class FileSystemFactory extends TreeChildFactory<FileSystemContentSearchParam> {
private static final Logger logger = Logger.getLogger(FileSystemFactory.class.getName());
private Long contentId = null;
private Host host = null;
/**
* Create a factory for a given parent content ID.
*
* @param contentId The object ID for this node
*/
public FileSystemFactory(Long contentId) {
this.contentId = contentId;
}
/**
* Create a factory for a given parent Host.
*
* @param host The parent host for this node
*/
public FileSystemFactory(Host host) {
this.host = host;
}
@Override
protected TreeResultsDTO<? extends FileSystemContentSearchParam> getChildResults() throws IllegalArgumentException, ExecutionException {
if (host == null) {
TreeResultsDTO<? extends FileSystemContentSearchParam> results = MainDAO.getInstance().getFileSystemDAO().getDisplayableContentChildren(contentId);
return results;
} else {
TreeResultsDTO<? extends FileSystemContentSearchParam> results = MainDAO.getInstance().getFileSystemDAO().getDataSourcesForHost(host);
return results;
}
}
@Override
protected TreeNode<FileSystemContentSearchParam> createNewNode(TreeResultsDTO.TreeItemDTO<? extends FileSystemContentSearchParam> rowData) {
try {
Content content = Case.getCurrentCaseThrows().getSleuthkitCase().getContentById(rowData.getSearchParams().getContentObjectId());
if (content instanceof Image) {
return new ImageTreeNode((Image) content, rowData);
} else if (content instanceof Volume) {
return new VolumeTreeNode((Volume) content, rowData);
} else if (content instanceof Pool) {
return new PoolTreeNode((Pool) content, rowData);
} else if (content instanceof LocalFilesDataSource) {
return new LocalFilesDataSourceTreeNode((LocalFilesDataSource) content, rowData);
} else if (content instanceof LocalDirectory) {
return new LocalDirectoryTreeNode((LocalDirectory) content, rowData);
} else if (content instanceof VirtualDirectory) {
return new VirtualDirectoryTreeNode((VirtualDirectory) content, rowData);
} else if (content instanceof AbstractFile) {
AbstractFile file = (AbstractFile) content;
if (file.isDir()) {
return new DirectoryTreeNode(file, rowData);
} else {
return new FileTreeNode(file, rowData);
}
} else {
return new UnsupportedTreeNode(content, rowData);
}
} catch (NoCurrentCaseException ex) {
// Case was likely closed while nodes were being created - don't fill the log with errors.
return null;
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error creating new node for content with ID: " + rowData.getSearchParams().getContentObjectId(), ex);
return null;
}
}
@Override
protected TreeResultsDTO.TreeItemDTO<? extends FileSystemContentSearchParam> getOrCreateRelevantChild(TreeEvent treeEvt) {
// GVDTODO
return null;
}
@Override
public int compare(FileSystemContentSearchParam o1, FileSystemContentSearchParam o2) {
// GVDTODO
return 0;
}
/**
* This factory is used to produce the single data source node under "Data
* Source Files" when grouping by person/host is selected.
*/
public static class DataSourceFactory extends TreeChildFactory<FileSystemContentSearchParam> {
private final long dataSourceId;
/**
* Create the factory for a given data source object ID.
*
* @param dataSourceId The data source object ID.
*/
public DataSourceFactory(long dataSourceId) {
this.dataSourceId = dataSourceId;
}
@Override
protected TreeResultsDTO<? extends FileSystemContentSearchParam> getChildResults() throws IllegalArgumentException, ExecutionException {
// We're not really getting children here, just creating a node for the data source itself.
return MainDAO.getInstance().getFileSystemDAO().getSingleDataSource(dataSourceId);
}
@Override
protected TreeNode<FileSystemContentSearchParam> createNewNode(TreeResultsDTO.TreeItemDTO<? extends FileSystemContentSearchParam> rowData) {
try {
DataSource ds = Case.getCurrentCaseThrows().getSleuthkitCase().getDataSource(dataSourceId);
if (ds instanceof Image) {
return new ImageTreeNode((Image) ds, rowData);
} else if (ds instanceof LocalFilesDataSource) {
return new LocalFilesDataSourceTreeNode((LocalFilesDataSource) ds, rowData);
} else {
logger.log(Level.SEVERE, "Unexpected data source type (ID: {0})", dataSourceId);
return null;
}
} catch (NoCurrentCaseException ex) {
// Case is likely closing
return null;
} catch (TskCoreException | TskDataException ex) {
logger.log(Level.SEVERE, "Error creating node from data source with ID: " + dataSourceId, ex);
return null;
}
}
@Override
protected TreeResultsDTO.TreeItemDTO<? extends FileSystemContentSearchParam> getOrCreateRelevantChild(TreeEvent treeEvt) {
// GVDTODO
return null;
}
@Override
public int compare(FileSystemContentSearchParam o1, FileSystemContentSearchParam o2) {
// GVDTODO
return 0;
}
}
/**
* Abstract base node for file system content in the tree; carries the item's display name and count.
*/
@NbBundle.Messages({
"FileSystemFactory.FileSystemTreeNode.ExtractUnallocAction.text=Extract Unallocated Space to Single Files"})
public abstract static class FileSystemTreeNode extends TreeNode<FileSystemContentSearchParam> implements ActionContext {
protected FileSystemTreeNode(String icon, TreeResultsDTO.TreeItemDTO<? extends FileSystemContentSearchParam> itemData, Children children, Lookup lookup) {
super(ContentNodeUtil.getContentName(itemData.getSearchParams().getContentObjectId()), icon, itemData, children, lookup);
}
protected static Children createChildrenForContent(Long contentId) {
try {
if (FileSystemColumnUtils.getVisibleTreeNodeChildren(contentId).isEmpty()) {
return Children.LEAF;
} else {
return Children.create(new FileSystemFactory(contentId), true);
}
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Error creating children for content with ID: " + contentId, ex);
return Children.LEAF;
} catch (NoCurrentCaseException ex) {
return Children.LEAF;
}
}
@Override
public void respondSelection(DataResultTopComponent dataResultPanel) {
dataResultPanel.displayFileSystemContent(this.getItemData().getSearchParams());
}
public abstract Node clone();
@Override
public Action[] getActions(boolean context) {
return ActionsFactory.getActions(this);
}
}
static class ImageTreeNode extends FileSystemTreeNode {
Image image;
ImageTreeNode(Image image, TreeResultsDTO.TreeItemDTO<? extends FileSystemContentSearchParam> itemData) {
super(NodeIconUtil.IMAGE.getPath(),
itemData,
createChildrenForContent(itemData.getSearchParams().getContentObjectId()),
ContentNodeUtil.getLookup(image));
this.image = image;
}
public Node clone() {
return new ImageTreeNode(image, getItemData());
}
@Override
public Optional<ActionsFactory.ActionGroup> getNodeSpecificActions() {
ActionsFactory.ActionGroup group = new ActionsFactory.ActionGroup();
group.add(new ExtractUnallocAction(
Bundle.FileSystemFactory_FileSystemTreeNode_ExtractUnallocAction_text(), image));
return Optional.of(group);
}
@Override
public Optional<Content> getDataSourceForActions() {
return Optional.of(image);
}
@Override
public boolean supportsSourceContentViewerActions() {
return true;
}
}
static class VolumeTreeNode extends FileSystemTreeNode {
Volume volume;
VolumeTreeNode(Volume volume, TreeResultsDTO.TreeItemDTO<? extends FileSystemContentSearchParam> itemData) {
super(NodeIconUtil.VOLUME.getPath(),
itemData,
createChildrenForContent(itemData.getSearchParams().getContentObjectId()),
ContentNodeUtil.getLookup(volume));
this.volume = volume;
}
public Node clone() {
return new VolumeTreeNode(volume, getItemData());
}
@Override
public Optional<ActionsFactory.ActionGroup> getNodeSpecificActions() {
ActionsFactory.ActionGroup group = new ActionsFactory.ActionGroup();
group.add(new ExtractUnallocAction(
Bundle.FileSystemFactory_FileSystemTreeNode_ExtractUnallocAction_text(), volume));
group.add(new FileSystemDetailsAction(volume));
return Optional.of(group);
}
@Override
public boolean supportsSourceContentViewerActions() {
return true;
}
}
static class PoolTreeNode extends FileSystemTreeNode {
Pool pool;
PoolTreeNode(Pool pool, TreeResultsDTO.TreeItemDTO<? extends FileSystemContentSearchParam> itemData) {
super(NodeIconUtil.VOLUME.getPath(),
itemData,
createChildrenForContent(itemData.getSearchParams().getContentObjectId()),
ContentNodeUtil.getLookup(pool));
this.pool = pool;
}
public Node clone() {
return new PoolTreeNode(pool, getItemData());
}
}
static class DirectoryTreeNode extends FileSystemTreeNode {
AbstractFile dir;
DirectoryTreeNode(AbstractFile dir, TreeResultsDTO.TreeItemDTO<? extends FileSystemContentSearchParam> itemData) {
super(getDirectoryIcon(dir),
itemData,
createChildrenForContent(itemData.getSearchParams().getContentObjectId()),
ContentNodeUtil.getLookup(dir));
this.dir = dir;
}
private static String getDirectoryIcon(AbstractFile dir) {
if (dir.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC)) {
return DELETED_FOLDER.getPath();
} else {
return FOLDER.getPath();
}
}
public Node clone() {
return new DirectoryTreeNode(dir, getItemData());
}
@Override
public boolean supportsViewInTimeline() {
return true;
}
@Override
public Optional<AbstractFile> getFileForViewInTimelineAction() {
return Optional.of(dir);
}
@Override
public boolean supportsTreeExtractActions() {
return true;
}
@Override
public Optional<Content> getContentForRunIngestionModuleAction() {
return Optional.of(dir);
}
@Override
public boolean supportsContentTagAction() {
return true;
}
}
abstract static class SpecialDirectoryTreeNode extends FileSystemTreeNode {
AbstractFile dir;
protected SpecialDirectoryTreeNode(AbstractFile dir, String icon, TreeResultsDTO.TreeItemDTO<? extends FileSystemContentSearchParam> itemData, Children children, Lookup lookup) {
super(icon, itemData, children, lookup);
this.dir = dir;
}
@Override
public boolean supportsSourceContentViewerActions() {
return true;
}
@Override
public boolean supportsTreeExtractActions() {
return true;
}
@Override
public Optional<Content> getContentForRunIngestionModuleAction() {
return Optional.of(dir);
}
}
static class LocalDirectoryTreeNode extends SpecialDirectoryTreeNode {
LocalDirectoryTreeNode(AbstractFile dir, TreeResultsDTO.TreeItemDTO<? extends FileSystemContentSearchParam> itemData) {
super(dir,
NodeIconUtil.FOLDER.getPath(),
itemData,
createChildrenForContent(itemData.getSearchParams().getContentObjectId()),
ContentNodeUtil.getLookup(dir));
}
public Node clone() {
return new LocalDirectoryTreeNode(dir, getItemData());
}
@Override
public boolean supportsContentTagAction() {
return true;
}
}
static class LocalFilesDataSourceTreeNode extends SpecialDirectoryTreeNode {
LocalFilesDataSourceTreeNode(AbstractFile localFilesDataSource, TreeResultsDTO.TreeItemDTO<? extends FileSystemContentSearchParam> itemData) {
super(localFilesDataSource,
NodeIconUtil.VOLUME.getPath(),
itemData,
createChildrenForContent(itemData.getSearchParams().getContentObjectId()),
ContentNodeUtil.getLookup(localFilesDataSource));
}
public Node clone() {
return new LocalFilesDataSourceTreeNode(dir, getItemData());
}
@Override
public Optional<Content> getDataSourceForActions() {
return Optional.of(dir);
}
}
static class VirtualDirectoryTreeNode extends SpecialDirectoryTreeNode {
VirtualDirectoryTreeNode(AbstractFile dir, TreeResultsDTO.TreeItemDTO<? extends FileSystemContentSearchParam> itemData) {
super(dir,
NodeIconUtil.VIRTUAL_DIRECTORY.getPath(),
itemData,
createChildrenForContent(itemData.getSearchParams().getContentObjectId()),
ContentNodeUtil.getLookup(dir));
}
public Node clone() {
return new VirtualDirectoryTreeNode(dir, getItemData());
}
}
static class FileTreeNode extends FileSystemTreeNode {
AbstractFile file;
FileTreeNode(AbstractFile file, TreeResultsDTO.TreeItemDTO<? extends FileSystemContentSearchParam> itemData) {
super(getFileIcon(file),
itemData,
createChildrenForContent(itemData.getSearchParams().getContentObjectId()),
ContentNodeUtil.getLookup(file));
this.file = file;
}
public Node clone() {
return new FileTreeNode(file, getItemData());
}
private static String getFileIcon(AbstractFile file) {
if (file.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC)) {
if (file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.CARVED)) {
return CARVED_FILE.getPath();
} else {
return DELETED_FILE.getPath();
}
} else {
MediaTypeUtils.ExtensionMediaType mediaType = MediaTypeUtils.getExtensionMediaType(file.getNameExtension());
return MediaTypeUtils.getIconForFileType(mediaType);
}
}
@Override
public boolean supportsViewInTimeline() {
return true;
}
@Override
public Optional<AbstractFile> getFileForViewInTimelineAction() {
return Optional.of(file);
}
@Override
public boolean supportsSourceContentViewerActions() {
return true;
}
@Override
public Optional<Node> getNewWindowActionNode() {
return Optional.of(this);
}
@Override
public Optional<Node> getExternalViewerActionNode() {
return Optional.of(this);
}
@Override
public boolean supportsTreeExtractActions() {
return true;
}
@Override
public boolean supportsContentTagAction() {
return true;
}
@Override
public Optional<AbstractFile> getFileForDirectoryBrowseMode() {
return Optional.of(file);
}
@Override
public Optional<AbstractFile> getExtractArchiveWithPasswordActionFile() {
// TODO: See JIRA-8099
boolean isArchive = FileTypeExtensions.getArchiveExtensions().contains("." + file.getNameExtension().toLowerCase());
boolean encryptionDetected = false;
try {
encryptionDetected = isArchive && file.getArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED).size() > 0;
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error loading artifacts for file with ID: " + file.getId(), ex);
}
return encryptionDetected ? Optional.of(file) : Optional.empty();
}
}
@NbBundle.Messages({
"FileSystemFactory.UnsupportedTreeNode.displayName=Unsupported Content",})
static class UnsupportedTreeNode extends FileSystemTreeNode {
Content content;
UnsupportedTreeNode(Content content, TreeResultsDTO.TreeItemDTO<? extends FileSystemContentSearchParam> itemData) {
super(NodeIconUtil.FILE.getPath(),
itemData,
createChildrenForContent(itemData.getSearchParams().getContentObjectId()),
getDefaultLookup(itemData));
this.content = content;
}
public Node clone() {
return new UnsupportedTreeNode(content, getItemData());
}
}
}
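For context, these factories plug into the NetBeans Children API. A minimal usage sketch, mirroring the Children.create(...) call inside createChildrenForContent above; parentObjId and host are hypothetical variables:

// the 'true' flag asks the Children API to compute keys off the EDT,
// matching the asynchronous creation used by createChildrenForContent(...)
Children byContent = Children.create(new FileSystemFactory(parentObjId), true);
Children byHost = Children.create(new FileSystemFactory(host), true);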

View File

@ -41,9 +41,10 @@ public class ImageNode extends BaseNode<SearchResultsDTO, ImageRowDTO> {
*/
public ImageNode(SearchResultsDTO results, ImageRowDTO row) {
super(Children.LEAF, ContentNodeUtil.getLookup(row.getContent()), results, row);
setName(ContentNodeUtil.getContentName(row.getContent().getId()));
setDisplayName(row.getContent().getName());
setShortDescription(row.getContent().getName());
setIconBaseWithExtension("org/sleuthkit/autopsy/images/hard-drive-icon.jpg"); //NON-NLS
setIconBaseWithExtension(NodeIconUtil.IMAGE.getPath()); //NON-NLS
}
@NbBundle.Messages({

View File

@ -0,0 +1,74 @@
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2021 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.mainui.nodes;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.LocalFilesDataSource;
import org.sleuthkit.datamodel.Pool;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.datamodel.Volume;
/**
* Consolidates node icon paths shared between the result view table and the tree.
*/
class NodeIconUtil {
final static NodeIconUtil FOLDER = new NodeIconUtil("org/sleuthkit/autopsy/images/Folder-icon.png");
final static NodeIconUtil DELETED_FOLDER = new NodeIconUtil("org/sleuthkit/autopsy/images/folder-icon-deleted.png");
final static NodeIconUtil VIRTUAL_DIRECTORY = new NodeIconUtil("org/sleuthkit/autopsy/images/folder-icon-virtual.png");
final static NodeIconUtil CARVED_FILE = new NodeIconUtil("org/sleuthkit/autopsy/images/carved-file-x-icon-16.png");
final static NodeIconUtil DELETED_FILE = new NodeIconUtil("org/sleuthkit/autopsy/images/file-icon-deleted.png");
final static NodeIconUtil IMAGE = new NodeIconUtil("org/sleuthkit/autopsy/images/hard-drive-icon.jpg");
final static NodeIconUtil VOLUME = new NodeIconUtil("org/sleuthkit/autopsy/images/vol-icon.png");
final static NodeIconUtil POOL = new NodeIconUtil("org/sleuthkit/autopsy/images/pool-icon.png");
final static NodeIconUtil FILE = new NodeIconUtil("org/sleuthkit/autopsy/images/file-icon.png");
final static NodeIconUtil LOCAL_FILES_DATA_SOURCE = new NodeIconUtil("org/sleuthkit/autopsy/images/fileset-icon-16.png");
private final String iconPath;
private NodeIconUtil(String path) {
this.iconPath = path;
}
String getPath() {
return iconPath;
}
public static String getPathForContent(Content c) {
if (c instanceof Image) {
return IMAGE.getPath();
} else if (c instanceof LocalFilesDataSource) {
return LOCAL_FILES_DATA_SOURCE.getPath();
} else if (c instanceof Volume) {
return VOLUME.getPath();
} else if (c instanceof Pool) {
return POOL.getPath();
} else if (c instanceof AbstractFile) {
AbstractFile file = (AbstractFile) c;
if (file.isDir()) {
if (file.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC)) {
return DELETED_FOLDER.getPath();
} else {
return FOLDER.getPath();
}
} else {
if (file.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC)) {
if (file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.CARVED)) {
return CARVED_FILE.getPath();
} else {
return DELETED_FILE.getPath();
}
} else {
return FILE.getPath();
}
}
}
return FILE.getPath();
}
}
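Call sites can then resolve icons through the utility instead of hard-coding resource paths. A minimal sketch for an arbitrary Content instance; the node and content variables are hypothetical:

// pick the icon by content type and flags rather than a literal path
String iconPath = NodeIconUtil.getPathForContent(content);
node.setIconBaseWithExtension(iconPath);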

View File

@ -39,7 +39,7 @@ public class OsAccountNode extends BaseNode<SearchResultsDTO, OsAccountRowDTO>{
results,
rowData);
String name = rowData.getContent().getName();
setName(name);
setName(ContentNodeUtil.getContentName(rowData.getContent().getId()));
setDisplayName(name);
setShortDescription(name);
setIconBaseWithExtension(ICON_PATH);
@ -56,7 +56,7 @@ public class OsAccountNode extends BaseNode<SearchResultsDTO, OsAccountRowDTO>{
}
@Override
public boolean supportsExtractActions() {
public boolean supportsTableExtractActions() {
return true;
}

View File

@ -41,8 +41,9 @@ public class PoolNode extends BaseNode<SearchResultsDTO, PoolRowDTO> {
results, row);
String name = row.getContent().getType().getName();
setName(ContentNodeUtil.getContentName(row.getContent().getId()));
setDisplayName(name);
setShortDescription(name);
setIconBaseWithExtension("org/sleuthkit/autopsy/images/pool-icon.png");
setIconBaseWithExtension(NodeIconUtil.POOL.getPath());
}
}

View File

@ -18,9 +18,9 @@
*/
package org.sleuthkit.autopsy.mainui.nodes;
import java.beans.PropertyChangeEvent;
import java.text.MessageFormat;
import java.util.concurrent.ExecutionException;
import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent;
import org.sleuthkit.autopsy.mainui.datamodel.SearchResultsDTO;
/**
@ -133,7 +133,7 @@ public class SearchManager {
*/
public synchronized SearchResultsDTO updatePageIdx(int pageIdx) throws IllegalArgumentException, ExecutionException {
setPageIdx(pageIdx);
return fetchResults(false);
return getResults();
}
/**
@ -171,11 +171,11 @@ public class SearchManager {
/**
* Determines if a refresh is required for the currently selected item.
*
* @param evt The ingest module event.
* @param evt The event.
*
* @return True if an update is required.
*/
public synchronized boolean isRefreshRequired(PropertyChangeEvent evt) {
public synchronized boolean isRefreshRequired(DAOEvent evt) {
return isRefreshRequired(this.daoFetcher, evt);
}
@ -183,11 +183,11 @@ public class SearchManager {
* Determines if a refresh is required for the currently selected item.
*
* @param dataFetcher The data fetcher.
* @param evt The ingest module event.
* @param evt The event.
*
* @return True if an update is required.
*/
private synchronized <P> boolean isRefreshRequired(DAOFetcher<P> dataFetcher, PropertyChangeEvent evt) {
private synchronized <P> boolean isRefreshRequired(DAOFetcher<P> dataFetcher, DAOEvent evt) {
if (dataFetcher == null) {
return false;
}
@ -195,17 +195,6 @@ public class SearchManager {
return dataFetcher.isRefreshRequired(evt);
}
/**
* Forces a refresh of data based on current search parameters.
*
* @return The refreshed data.
*
* @throws ExecutionException
*/
public synchronized SearchResultsDTO getRefreshedData() throws ExecutionException {
return fetchResults(true);
}
/**
* Queries the DAO cache for results, storing them as the current
* search results.
@ -216,25 +205,13 @@ public class SearchManager {
* @throws ExecutionException
*/
public synchronized SearchResultsDTO getResults() throws IllegalArgumentException, ExecutionException {
return fetchResults(false);
return fetchResults(this.daoFetcher);
}
/**
* Fetches results using current page fetcher or returns null if no current
* page fetcher. Also caches the fetched results as the current search results.
*
* @return The current search results or null if no current page fetcher.
*
* @throws ExecutionException
*/
private synchronized SearchResultsDTO fetchResults(boolean hardRefresh) throws ExecutionException {
return fetchResults(this.daoFetcher, hardRefresh);
}
private synchronized SearchResultsDTO fetchResults(DAOFetcher<?> dataFetcher, boolean hardRefresh) throws ExecutionException {
private synchronized SearchResultsDTO fetchResults(DAOFetcher<?> dataFetcher) throws ExecutionException {
SearchResultsDTO newResults = null;
if (dataFetcher != null) {
newResults = dataFetcher.getSearchResults(this.pageSize, this.pageIdx, hardRefresh);
newResults = dataFetcher.getSearchResults(this.pageSize, this.pageIdx);
}
this.currentSearchResults = newResults;
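From the caller's side, the reworked API pairs page navigation with event-driven refresh. A minimal sketch, assuming an already-configured SearchManager and an incoming DAOEvent (construction and exception handling are outside this hunk; both calls can throw ExecutionException):

// jump to the second page; updatePageIdx(...) re-runs getResults() internally
SearchResultsDTO page = searchManager.updatePageIdx(1);

// on a DAO event, re-query only when the current fetcher is affected
if (searchManager.isRefreshRequired(daoEvent)) {
    SearchResultsDTO refreshed = searchManager.getResults();
    // ... hand the refreshed results to the table model
}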

View File

@ -43,6 +43,7 @@ abstract class SpecialDirectoryNode extends BaseNode<SearchResultsDTO, ContentRo
*/
private SpecialDirectoryNode(SearchResultsDTO results, ContentRowDTO<? extends SpecialDirectory> row) {
super(Children.LEAF, ContentNodeUtil.getLookup(row.getContent()), results, row);
setName(ContentNodeUtil.getContentName(row.getContent().getId()));
setDisplayName(row.getContent().getName());
setShortDescription(row.getContent().getName());
}
@ -58,7 +59,7 @@ abstract class SpecialDirectoryNode extends BaseNode<SearchResultsDTO, ContentRo
}
@Override
public boolean supportsExtractActions() {
public boolean supportsTableExtractActions() {
return true;
}
@ -67,11 +68,6 @@ abstract class SpecialDirectoryNode extends BaseNode<SearchResultsDTO, ContentRo
return Optional.of(getRowDTO().getContent());
}
@Override
public Optional<Content> getContentForFileSearchAction() {
return Optional.of(getRowDTO().getContent());
}
@Override
public Optional<Content> getDataSourceForActions() {
return getRowDTO().getContent().isDataSource()

View File

@ -22,83 +22,87 @@ import com.google.common.collect.MapMaker;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
import java.util.stream.Collectors;
import org.openide.nodes.ChildFactory;
import org.openide.nodes.Node;
import org.openide.util.WeakListeners;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.guiutils.RefreshThrottler;
import org.sleuthkit.autopsy.guiutils.RefreshThrottler.Refresher;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.mainui.datamodel.MainDAO;
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO;
import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO;
import org.sleuthkit.autopsy.mainui.datamodel.events.DAOAggregateEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent;
import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent;
/**
* Factory for populating tree with results.
* Factory for populating child nodes in a tree based on TreeResultsDTO.
*/
public abstract class TreeChildFactory<T> extends ChildFactory.Detachable<Object> implements Refresher {
public abstract class TreeChildFactory<T> extends ChildFactory.Detachable<Object> implements Comparator<T> {
private static final Logger logger = Logger.getLogger(TreeChildFactory.class.getName());
private static final Set<IngestManager.IngestJobEvent> INGEST_JOB_EVENTS_OF_INTEREST
= EnumSet.of(IngestManager.IngestJobEvent.COMPLETED, IngestManager.IngestJobEvent.CANCELLED);
private final RefreshThrottler refreshThrottler = new RefreshThrottler(this);
private final PropertyChangeListener pcl = (PropertyChangeEvent evt) -> {
String eventType = evt.getPropertyName();
if (eventType.equals(Case.Events.CURRENT_CASE.toString())) {
// case was closed. Remove listeners so that we don't get called with a stale case handle
if (evt.getNewValue() == null) {
removeNotify();
}
} else if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString())
|| eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) {
/**
* This is a stop gap measure until a different way of handling the
* closing of cases is worked out. Currently, remote events may be
* received for a case that is already closed.
*/
try {
Case.getCurrentCaseThrows();
refresh(false);
} catch (NoCurrentCaseException notUsed) {
/**
* Case is closed, do nothing.
*/
if (evt.getNewValue() instanceof DAOAggregateEvent) {
DAOAggregateEvent aggEvt = (DAOAggregateEvent) evt.getNewValue();
for (DAOEvent daoEvt : aggEvt.getEvents()) {
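// Only tree events are handled here: an event flagged refresh-required
// forces a full reload of this factory's children, while any other
// matching event patches just the affected child via updateNodeData(...).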
if (daoEvt instanceof TreeEvent) {
TreeEvent treeEvt = (TreeEvent) daoEvt;
TreeItemDTO<? extends T> item = getOrCreateRelevantChild(treeEvt);
if (item != null) {
if (treeEvt.isRefreshRequired()) {
update();
break;
} else {
updateNodeData(item);
}
}
}
}
}
};
private final PropertyChangeListener weakPcl = WeakListeners.propertyChange(pcl, null);
private final PropertyChangeListener weakPcl = WeakListeners.propertyChange(pcl, MainDAO.getInstance().getTreeEventsManager());
// maps the Node keys to the child TreeNode. Used to update existing Node with new counts
private final Map<Object, TreeNode<T>> typeNodeMap = new MapMaker().weakValues().makeMap();
private final Object resultsUpdateLock = new Object();
// Results of the last full load from the DAO. May not be complete because
// events will come in with more updated data.
private TreeResultsDTO<? extends T> curResults = null;
// All current child items (sorted). May have more items than curResults does because
// this is updated based on events and new data.
private List<TreeItemDTO<? extends T>> curItemsList = new ArrayList<>();
// maps the Node key (ID) to its DTO
private Map<Object, TreeItemDTO<? extends T>> idMapping = new HashMap<>();
@Override
protected boolean createKeys(List<Object> toPopulate) {
if (curResults == null) {
try {
updateData();
} catch (IllegalArgumentException | ExecutionException ex) {
logger.log(Level.WARNING, "An error occurred while fetching keys", ex);
return false;
List<TreeItemDTO<? extends T>> itemsList;
synchronized (resultsUpdateLock) {
// Load data from DAO if we haven't already
if (curResults == null) {
try {
updateData();
} catch (IllegalArgumentException | ExecutionException ex) {
logger.log(Level.WARNING, "An error occurred while fetching keys", ex);
return false;
}
}
// make copy to avoid concurrent modification
itemsList = new ArrayList<>(curItemsList);
}
// update existing cached nodes
List<Object> curResultIds = new ArrayList<>();
for (TreeItemDTO<? extends T> dto : curResults.getItems()) {
for (TreeItemDTO<? extends T> dto : itemsList) {
TreeNode<T> currentlyCached = typeNodeMap.get(dto.getId());
if (currentlyCached != null) {
currentlyCached.update(dto);
@ -122,25 +126,55 @@ public abstract class TreeChildFactory<T> extends ChildFactory.Detachable<Object
}
/**
* Updates local data by fetching data from the DAO's.
* Finds and updates a node based on new/updated data.
*
* @param item The added/updated item.
*/
protected void updateNodeData(TreeItemDTO<? extends T> item) {
TreeNode<T> cachedTreeNode = this.typeNodeMap.get(item.getId());
if (cachedTreeNode == null) {
synchronized (resultsUpdateLock) {
// add to id mapping
this.idMapping.put(item.getId(), item);
// insert in sorted position
int insertIndex = 0;
for (; insertIndex < this.curItemsList.size(); insertIndex++) {
if (this.compare(item.getSearchParams(), this.curItemsList.get(insertIndex).getSearchParams()) < 0) {
break;
}
}
this.curItemsList.add(insertIndex, item);
}
this.refresh(false);
} else {
cachedTreeNode.update(item);
}
}
/**
* Updates local data structures by fetching new data from the DAO's.
*
* @throws IllegalArgumentException
* @throws ExecutionException
*/
protected void updateData() throws IllegalArgumentException, ExecutionException {
this.curResults = getChildResults();
this.idMapping = curResults.getItems().stream()
.collect(Collectors.toMap(item -> item.getId(), item -> item, (item1, item2) -> item1));
synchronized (resultsUpdateLock) {
this.curResults = getChildResults();
Map<Object, TreeItemDTO<? extends T>> idMapping = new HashMap<>();
List<TreeItemDTO<? extends T>> curItemsList = new ArrayList<>();
for (TreeItemDTO<? extends T> item : this.curResults.getItems()) {
idMapping.put(item.getId(), item);
curItemsList.add(item);
}
}
@Override
public void refresh() {
update();
this.idMapping = idMapping;
this.curItemsList = curItemsList;
}
}
/**
* Fetches child view from the database and updates the tree.
* Updates the tree using new data from the DAO.
*/
public void update() {
try {
@ -156,40 +190,41 @@ public abstract class TreeChildFactory<T> extends ChildFactory.Detachable<Object
* Dispose resources associated with this factory.
*/
private void disposeResources() {
curResults = null;
typeNodeMap.clear();
idMapping.clear();
synchronized (resultsUpdateLock) {
curResults = null;
this.curItemsList.clear();
idMapping.clear();
}
}
/**
* Register listeners for autopsy events.
* Register listeners for DAO events.
*/
private void registerListeners() {
refreshThrottler.registerForIngestModuleEvents();
IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, weakPcl);
Case.addEventTypeSubscriber(EnumSet.of(Case.Events.CURRENT_CASE), weakPcl);
MainDAO.getInstance().getTreeEventsManager().addPropertyChangeListener(weakPcl);
}
/**
* Unregister listeners for autopsy events.
* Unregister listeners for DAO events.
*/
private void unregisterListeners() {
refreshThrottler.unregisterEventListener();
IngestManager.getInstance().removeIngestJobEventListener(weakPcl);
Case.removeEventTypeSubscriber(EnumSet.of(Case.Events.CURRENT_CASE), weakPcl);
// GVDTODO this may not be necessary due to the weak listener's ability to unregister itself
MainDAO.getInstance().getTreeEventsManager().removePropertyChangeListener(weakPcl);
}
@Override
protected void removeNotify() {
disposeResources();
unregisterListeners();
disposeResources();
super.removeNotify();
}
@Override
protected void finalize() throws Throwable {
disposeResources();
unregisterListeners();
disposeResources();
super.finalize();
}
@ -199,6 +234,47 @@ public abstract class TreeChildFactory<T> extends ChildFactory.Detachable<Object
super.addNotify();
}
/**
* A utility method that creates a TreeItemDTO using the data in 'original'
* for all fields except 'typeData' where 'updatedData' is used instead.
*
* @param original The original tree item dto.
* @param updatedData The new type data to use.
*
* @return The created tree item dto.
*/
static <T> TreeItemDTO<T> createTreeItemDTO(TreeItemDTO<T> original, T updatedData) {
return new TreeItemDTO<>(
original.getTypeId(),
updatedData,
original.getId(),
original.getDisplayName(),
original.getDisplayCount());
}
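For instance, a factory reacting to an event can keep an item's identity while swapping its parameters. A one-line sketch; originalItem and updatedParams are hypothetical:

// same type id, node id, display name, and count; only the type data changes
TreeItemDTO<FileSystemContentSearchParam> patched = createTreeItemDTO(originalItem, updatedParams);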
/**
* Returns the underlying tree item dto in the tree event if the search
* params of the tree item dto are of the expected type. Otherwise, returns
* null.
*
* @param treeEvt The tree event.
* @param expectedSearchParamsType The expected type of the search params of
* the tree item dto in the tree event.
*
* @return The typed tree item dto in the tree event or null if no match
* found.
*/
protected <T> TreeItemDTO<T> getTypedTreeItem(TreeEvent treeEvt, Class<T> expectedSearchParamsType) {
if (treeEvt != null && treeEvt.getItemRecord() != null && treeEvt.getItemRecord().getSearchParams() != null
&& expectedSearchParamsType.isAssignableFrom(treeEvt.getItemRecord().getSearchParams().getClass())) {
@SuppressWarnings("unchecked")
TreeItemDTO<T> originalTreeItem = (TreeItemDTO<T>) treeEvt.getItemRecord();
return originalTreeItem;
}
return null;
}
/**
* Creates a TreeNode given the tree item data.
*
@ -217,4 +293,15 @@ public abstract class TreeChildFactory<T> extends ChildFactory.Detachable<Object
* @throws ExecutionException
*/
protected abstract TreeResultsDTO<? extends T> getChildResults() throws IllegalArgumentException, ExecutionException;
/**
* Creates a child tree item dto that can be used to find the affected child
* node that requires updates.
*
* @param treeEvt The tree event.
*
* @return The tree item dto that can be used to find the child node
* affected by the tree event.
*/
protected abstract TreeItemDTO<? extends T> getOrCreateRelevantChild(TreeEvent treeEvt);
}
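The concrete factories earlier in this diff still stub getOrCreateRelevantChild with GVDTODO placeholders. A minimal sketch of one possible implementation for the file system factory, using the getTypedTreeItem helper above; the narrowing logic is an assumption, not project code:

@Override
protected TreeItemDTO<? extends FileSystemContentSearchParam> getOrCreateRelevantChild(TreeEvent treeEvt) {
    // narrow the event payload to this factory's search-parameter type;
    // null means the event belongs to a different part of the tree
    return getTypedTreeItem(treeEvt, FileSystemContentSearchParam.class);
}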

Some files were not shown because too many files have changed in this diff.