diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardAddingProgressPanel.java b/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardAddingProgressPanel.java
index f0753d70bd..58b1bb5a56 100644
--- a/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardAddingProgressPanel.java
+++ b/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardAddingProgressPanel.java
@@ -425,9 +425,14 @@ class AddImageWizardAddingProgressPanel extends ShortcutWizardDescriptorPanel {
             // TBD: there probably should be an error level for each error
             addErrors(err, critErr);
         }
-
-        //notify the UI of the new content added to the case
+        final Level level = critErr ? Level.SEVERE : Level.WARNING;
         new Thread(() -> {
+            //Log error messages as SEVERE if there was a critical error, otherwise as WARNING.
+            //Logging is performed off of the UI thread.
+            for (String err : errList) {
+                Logger.getLogger(AddImageWizardAddingProgressVisual.class.getName()).log(level, "DatasourceID: {0} Error Message: {1}", new Object[]{dataSourceId.toString(), err});
+            }
+            //notify the UI of the new content added to the case
             try {
                 if (!contents.isEmpty()) {
                     Case.getCurrentCaseThrows().notifyDataSourceAdded(contents.get(0), dataSourceId);
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/Bundle.properties-MERGED
index 724758847b..a1f1432a70 100755
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/Bundle.properties-MERGED
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/Bundle.properties-MERGED
@@ -12,6 +12,7 @@ CentralRepoDbChoice.PostgreSQL.Text=Custom PostgreSQL
 CentralRepoDbChoice.PostgreSQL_Multiuser.Text=PostgreSQL using multi-user settings
 CentralRepoDbChoice.Sqlite.Text=SQLite
 CentralRepoDbManager.connectionErrorMsg.text=Failed to connect to central repository database.
+CentralRepositoryService.progressMsg.startingListener=Starting events listener...
 CentralRepositoryService.progressMsg.updatingSchema=Checking for schema updates...
 CentralRepositoryService.progressMsg.waitingForListeners=Finishing adding data to central repository database....
 CentralRepositoryService.serviceName=Central Repository Service
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepository.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepository.java
index 3db1fea511..6127209811 100755
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepository.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepository.java
@@ -310,18 +310,25 @@ public interface CentralRepository {
    List<CorrelationAttributeInstance> getArtifactInstancesByTypeValues(CorrelationAttributeInstance.Type aType, List<String> values) throws CentralRepoException, CorrelationAttributeNormalizationException;

    /**
-     * Retrieves eamArtifact instances from the database that are associated
-     * with the eamArtifactType and eamArtifactValue of the given eamArtifact.
+     * Retrieves correlation attribute instances from the central repository
+     * that match a given attribute type and value.
     *
-     * @param aType The type of the artifact
-     * @param value The correlation value
+     * @param type  The correlation attribute type.
+     * @param value The correlation attribute value.
     *
-     * @return List of artifact instances for a given type/value
+     * @return The matching correlation attribute instances.
     *
-     * @throws CorrelationAttributeNormalizationException
-     * @throws CentralRepoException
+     * @throws CorrelationAttributeNormalizationException The exception is
+     *                                                    thrown if the supplied
+     *                                                    correlation attribute
+     *                                                    value cannot be
+     *                                                    normalized.
+     * @throws CentralRepoException                       The exception is
+     *                                                    thrown if there is an
+     *                                                    error querying the
+     *                                                    central repository.
     */
-    List<CorrelationAttributeInstance> getArtifactInstancesByTypeValue(CorrelationAttributeInstance.Type aType, String value) throws CentralRepoException, CorrelationAttributeNormalizationException;
+    List<CorrelationAttributeInstance> getArtifactInstancesByTypeValue(CorrelationAttributeInstance.Type type, String value) throws CentralRepoException, CorrelationAttributeNormalizationException;

    /**
     * Retrieves eamArtifact instances from the database that are associated
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepositoryService.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepositoryService.java
index 83dc61d06d..e2f20524e3 100755
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepositoryService.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepositoryService.java
@@ -1,7 +1,7 @@
 /*
  * Central Repository
  *
- * Copyright 2018-2020 Basis Technology Corp.
+ * Copyright 2018-2021 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -24,7 +24,6 @@ import org.sleuthkit.autopsy.appservices.AutopsyService;
 import org.sleuthkit.autopsy.progress.ProgressIndicator;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.centralrepository.eventlisteners.CaseEventListener;
-import org.sleuthkit.autopsy.centralrepository.eventlisteners.IngestEventsListener;
 import org.sleuthkit.datamodel.Content;
 import org.sleuthkit.datamodel.DataSource;
 import org.sleuthkit.datamodel.TskCoreException;
@@ -36,8 +35,7 @@ import org.sleuthkit.datamodel.TskCoreException;
 public class CentralRepositoryService implements AutopsyService {

     private CaseEventListener caseEventListener = new CaseEventListener();
-    private IngestEventsListener ingestEventListener = new IngestEventsListener();
-
+
     @Override
     @NbBundle.Messages({
         "CentralRepositoryService.serviceName=Central Repository Service"
@@ -47,7 +45,8 @@ public class CentralRepositoryService implements AutopsyService {
     }

     @NbBundle.Messages({
-        "CentralRepositoryService.progressMsg.updatingSchema=Checking for schema updates..."
+        "CentralRepositoryService.progressMsg.updatingSchema=Checking for schema updates...",
+        "CentralRepositoryService.progressMsg.startingListener=Starting events listener..."
     })
     @Override
     public void openCaseResources(CaseContext context) throws AutopsyServiceException {
@@ -58,21 +57,20 @@ public class CentralRepositoryService implements AutopsyService {
         ProgressIndicator progress = context.getProgressIndicator();
         progress.progress(Bundle.CentralRepositoryService_progressMsg_updatingSchema());
         updateSchema();
-
         if (context.cancelRequested()) {
             return;
         }

         dataUpgradeForVersion1dot2(context.getCase());
-
+        if (context.cancelRequested()) {
+            return;
+        }
+
+        progress.progress(Bundle.CentralRepositoryService_progressMsg_startingListener());
         caseEventListener = new CaseEventListener();
-        caseEventListener.installListeners();
-
-        ingestEventListener = new IngestEventsListener();
-        ingestEventListener.installListeners();
-
+        caseEventListener.startUp();
     }
-
+
     @NbBundle.Messages({
         "CentralRepositoryService.progressMsg.waitingForListeners=Finishing adding data to central repository database...."
     })
@@ -80,22 +78,16 @@ public void closeCaseResources(CaseContext context) throws AutopsyServiceException {
         ProgressIndicator progress = context.getProgressIndicator();
         progress.progress(Bundle.CentralRepositoryService_progressMsg_waitingForListeners());
-
         if (caseEventListener != null) {
-            caseEventListener.uninstallListeners();
             caseEventListener.shutdown();
         }
-
-        if (ingestEventListener != null) {
-            ingestEventListener.uninstallListeners();
-            ingestEventListener.shutdown();
-        }
     }

     /**
-     * Updates the central repository schema to the latest version.
+     * Updates the central repository database schema to the latest version.
     *
-     * @throws AutopsyServiceException
+     * @throws AutopsyServiceException The exception is thrown if there is an
+     *                                 error updating the database schema.
     */
     private void updateSchema() throws AutopsyServiceException {
         try {
@@ -107,10 +99,11 @@

     /**
     * Adds missing data source object IDs from data sources in this case to the
-     * corresponding records in the central repository. This is a data update to
-     * go with the v1.2 schema update.
+     * corresponding records in the central repository database. This is a data
+     * update to go with the v1.2 schema update.
     *
-     * @throws AutopsyServiceException
+     * @throws AutopsyServiceException The exception is thrown if there is an
+     *                                 error updating the database.
     */
     private void dataUpgradeForVersion1dot2(Case currentCase) throws AutopsyServiceException {
         try {
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeUtil.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeUtil.java
index 6f47471891..18fd4d5c1d 100755
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeUtil.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeUtil.java
@@ -40,7 +40,6 @@ import org.sleuthkit.datamodel.BlackboardAttribute;
 import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
 import org.sleuthkit.datamodel.Content;
 import org.sleuthkit.datamodel.DataArtifact;
-import org.sleuthkit.datamodel.DataSource;
 import org.sleuthkit.datamodel.HashUtility;
 import org.sleuthkit.datamodel.InvalidAccountIDException;
 import org.sleuthkit.datamodel.OsAccount;
@@ -114,8 +113,54 @@ public class CorrelationAttributeUtil {
         return Collections.emptyList();
     }

-    public static List<CorrelationAttributeInstance> makeCorrAttrsToSave(OsAccountInstance osAccountInstance) {
-        return makeCorrAttrsForSearch(osAccountInstance);
+    /**
+     * Gets the correlation attributes for an OS account instance represented as
+     * an OS account plus a data source.
+     *
+     * @param account    The OS account.
+     * @param dataSource The data source.
+     *
+     * @return The correlation attributes.
+     */
+    public static List<CorrelationAttributeInstance> makeCorrAttrsToSave(OsAccount account, Content dataSource) {
+        List<CorrelationAttributeInstance> correlationAttrs = new ArrayList<>();
+        if (CentralRepository.isEnabled()) {
+            Optional<String> accountAddr = account.getAddr();
+            if (accountAddr.isPresent() && !isSystemOsAccount(accountAddr.get())) {
+                try {
+                    CorrelationCase correlationCase = CentralRepository.getInstance().getCase(Case.getCurrentCaseThrows());
+                    CorrelationAttributeInstance correlationAttributeInstance = new CorrelationAttributeInstance(
+                            CentralRepository.getInstance().getCorrelationTypeById(CorrelationAttributeInstance.OSACCOUNT_TYPE_ID),
+                            accountAddr.get(),
+                            correlationCase,
+                            CorrelationDataSource.fromTSKDataSource(correlationCase, dataSource),
+                            dataSource.getName(),
+                            "",
+                            TskData.FileKnown.KNOWN,
+                            account.getId());
+                    correlationAttrs.add(correlationAttributeInstance);
+                } catch (CentralRepoException ex) {
+                    logger.log(Level.SEVERE, String.format("Error querying central repository for OS account '%s'", accountAddr.get()), ex); //NON-NLS
+                } catch (NoCurrentCaseException ex) {
+                    logger.log(Level.SEVERE, String.format("Error getting current case for OS account '%s'", accountAddr.get()), ex); //NON-NLS
+                } catch (CorrelationAttributeNormalizationException ex) {
+                    logger.log(Level.SEVERE, String.format("Error normalizing correlation attribute for OS account '%s'", accountAddr.get()), ex); //NON-NLS
+                }
+            }
+        }
+        return correlationAttrs;
+    }
+
+    /**
+     * Determines whether or not a given OS account address is a system account
+     * address.
+     *
+     * @param accountAddr The OS account address.
+     *
+     * @return True or false.
+     */
+    private static boolean isSystemOsAccount(String accountAddr) {
+        return accountAddr.equals("S-1-5-18") || accountAddr.equals("S-1-5-19") || accountAddr.equals("S-1-5-20");
     }

     /**
@@ -787,43 +832,11 @@ public class CorrelationAttributeUtil {
     public static List<CorrelationAttributeInstance> makeCorrAttrsForSearch(OsAccountInstance osAccountInst) {
         List<CorrelationAttributeInstance> correlationAttrs = new ArrayList<>();
-        if (CentralRepository.isEnabled()) {
-            OsAccount account = null;
-            DataSource dataSource = null;
-            if (osAccountInst != null) {
-                try {
-                    account = osAccountInst.getOsAccount();
-                    dataSource = osAccountInst.getDataSource();
-                } catch (TskCoreException ex) {
-                    logger.log(Level.SEVERE, "Error getting information from OsAccountInstance.", ex);
-                }
-            }
-            if (account != null && dataSource != null) {
-                Optional<String> accountAddr = account.getAddr();
-                // Check address if it is null or one of the ones below we want to ignore it since they will always be one a windows system
-                // and they are not unique
-                if (accountAddr.isPresent() && !accountAddr.get().equals("S-1-5-18") && !accountAddr.get().equals("S-1-5-19") && !accountAddr.get().equals("S-1-5-20")) {
-                    try {
-
-                        CorrelationCase correlationCase = CentralRepository.getInstance().getCase(Case.getCurrentCaseThrows());
-                        CorrelationAttributeInstance correlationAttributeInstance = new CorrelationAttributeInstance(
-                                CentralRepository.getInstance().getCorrelationTypeById(CorrelationAttributeInstance.OSACCOUNT_TYPE_ID),
-                                accountAddr.get(),
-                                correlationCase,
-                                CorrelationDataSource.fromTSKDataSource(correlationCase, dataSource),
-                                dataSource.getName(),
-                                "",
-                                TskData.FileKnown.KNOWN,
-                                account.getId());
-                        correlationAttrs.add(correlationAttributeInstance);
-                    } catch (CentralRepoException ex) {
-                        logger.log(Level.SEVERE, String.format("Cannot get central repository for OsAccount: %s.", accountAddr.get()), ex); //NON-NLS
-                    } catch (NoCurrentCaseException ex) {
-                        logger.log(Level.WARNING, "Exception while getting open case.", ex); //NON-NLS
-                    } catch (CorrelationAttributeNormalizationException ex) {
-                        logger.log(Level.SEVERE, "Exception with Correlation Attribute Normalization.", ex); //NON-NLS
-                    }
-                }
+        if (CentralRepository.isEnabled() && osAccountInst != null) {
+            try {
+                correlationAttrs.addAll(makeCorrAttrsToSave(osAccountInst.getOsAccount(), osAccountInst.getDataSource()));
+            } catch (TskCoreException ex) {
+                logger.log(Level.SEVERE, String.format("Error getting OS account from OS account instance '%s'", osAccountInst), ex);
             }
         }
         return correlationAttrs;
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationDataSource.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationDataSource.java
index 92c23d77c8..cc8f739892 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationDataSource.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationDataSource.java
@@ -1,7 +1,7 @@
 /*
  * Central Repository
  *
- * Copyright 2015-2019 Basis Technology Corp.
+ * Copyright 2015-2021 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -98,32 +98,33 @@ public class CorrelationDataSource implements Serializable {
     }

     /**
-     * Create a CorrelationDataSource object from a TSK Content object. This
-     * will add it to the central repository.
+     * Creates a central repository data source object from a case database data
+     * source. If the data source is not already present in the central
+     * repository, it is added.
     *
-     * @param correlationCase the current CorrelationCase used for ensuring
-     *                        uniqueness of DataSource
-     * @param dataSource      the sleuthkit datasource that is being added to
-     *                        the central repository
+     * @param correlationCase The central repository case associated with the
+     *                        data source.
+     * @param dataSource      The case database data source.
     *
-     * @return
+     * @return The central repository data source.
     *
-     * @throws CentralRepoException
+     * @throws CentralRepoException This exception is thrown if there is an
+     *                              error creating the central repository data
+     *                              source.
     */
     public static CorrelationDataSource fromTSKDataSource(CorrelationCase correlationCase, Content dataSource) throws CentralRepoException {
+        if (!CentralRepository.isEnabled()) {
+            throw new CentralRepoException(String.format("Central repository is not enabled, cannot create central repository data source for '%s'", dataSource));
+        }
+
         Case curCase;
         try {
             curCase = Case.getCurrentCaseThrows();
         } catch (NoCurrentCaseException ex) {
-            throw new CentralRepoException("Autopsy case is closed");
-        }
-
-        CorrelationDataSource correlationDataSource = null;
-        boolean useCR = CentralRepository.isEnabled();
-        if (useCR) {
-            correlationDataSource = CentralRepository.getInstance().getDataSource(correlationCase, dataSource.getId());
+            throw new CentralRepoException("Error getting current case", ex);
         }
+        CorrelationDataSource correlationDataSource = CentralRepository.getInstance().getDataSource(correlationCase, dataSource.getId());
         if (correlationDataSource == null) {
             String deviceId;
             String md5 = null;
@@ -131,7 +132,7 @@ public class CorrelationDataSource implements Serializable {
             String sha256 = null;
             try {
                 deviceId = curCase.getSleuthkitCase().getDataSource(dataSource.getId()).getDeviceId();
-
+
                 if (dataSource instanceof Image) {
                     Image image = (Image) dataSource;
                     md5 = image.getMd5();
@@ -139,15 +140,12 @@ public class CorrelationDataSource implements Serializable {
                     sha256 = image.getSha256();
                 }
             } catch (TskDataException | TskCoreException ex) {
-                throw new CentralRepoException("Error getting data source info: " + ex.getMessage());
+                throw new CentralRepoException("Error getting data source info from case database", ex);
             }
             correlationDataSource = new CorrelationDataSource(correlationCase, deviceId, dataSource.getName(), dataSource.getId(), md5, sha1, sha256);
-
-            if (useCR) {
-                //add the correlation data source to the central repository and fill in the Central repository data source id in the object
-                correlationDataSource = CentralRepository.getInstance().newDataSource(correlationDataSource);
-            }
+            correlationDataSource = CentralRepository.getInstance().newDataSource(correlationDataSource);
         }
+
         return correlationDataSource;
     }

@@ -205,66 +203,68 @@ public class CorrelationDataSource implements Serializable {
     public String getName() {
         return name;
     }
-
+
     /**
     * @return the MD5 hash value
     */
     public String getMd5() {
         return (md5Hash == null ? "" : md5Hash);
     }
-
+
     /**
-     * Set the MD5 hash value and persist to the Central Repository if available.
-     *
+     * Set the MD5 hash value and persist to the Central Repository if
+     * available.
+     *
     * @param md5Hash The MD5 hash value.
+     *
     * @throws CentralRepoException If there's an issue updating the Central
-                                   Repository.
+     *                              Repository.
     */
     public void setMd5(String md5Hash) throws CentralRepoException {
         this.md5Hash = md5Hash;
-
+
         if (dataSourceObjectID != -1) {
             CentralRepository.getInstance().updateDataSourceMd5Hash(this);
         }
     }
-
+
     /**
     * @return the SHA-1 hash value
     */
     public String getSha1() {
         return (sha1Hash == null ? "" : sha1Hash);
     }
-
+
     /**
     * Set the SHA-1 hash value and persist to the Central Repository if
     * available.
-     *
+     *
     * @param sha1Hash The SHA-1 hash value.
     */
     public void setSha1(String sha1Hash) throws CentralRepoException {
         this.sha1Hash = sha1Hash;
-
+
         if (dataSourceObjectID != -1) {
             CentralRepository.getInstance().updateDataSourceSha1Hash(this);
         }
     }
-
+
     /**
     * @return the SHA-256 hash value
     */
     public String getSha256() {
         return (sha256Hash == null ? "" : sha256Hash);
     }
-
+
     /**
     * Set the SHA-256 hash value and persist to the Central Repository if
     * available.
-     *
+     *
     * @param sha256Hash The SHA-256 hash value.
     */
     public void setSha256(String sha256Hash) throws CentralRepoException {
         this.sha256Hash = sha256Hash;
-
+
         if (dataSourceObjectID != -1) {
             CentralRepository.getInstance().updateDataSourceSha256Hash(this);
         }
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/RdbmsCentralRepo.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/RdbmsCentralRepo.java
index 1b37217355..d7fd79dd8e 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/RdbmsCentralRepo.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/RdbmsCentralRepo.java
@@ -1289,7 +1289,7 @@ abstract class RdbmsCentralRepo implements CentralRepository {
         if (values == null || values.isEmpty()) {
             throw new CorrelationAttributeNormalizationException("Cannot get artifact instances without specified values");
         }
-        return getArtifactInstances(prepareGetInstancesSql(aType, values), aType);
+        return getCorrAttrInstances(prepareGetInstancesSql(aType, values), aType);
     }

     @Override
@@ -1312,7 +1312,7 @@ abstract class RdbmsCentralRepo implements CentralRepository {
         inValuesBuilder.append(sql);
         inValuesBuilder.append(caseIds.stream().map(String::valueOf).collect(Collectors.joining("', '")));
         inValuesBuilder.append("')");
-        return getArtifactInstances(inValuesBuilder.toString(), aType);
+        return getCorrAttrInstances(inValuesBuilder.toString(), aType);
     }

     /**
@@ -1361,40 +1361,44 @@ abstract class RdbmsCentralRepo implements CentralRepository {
     }

     /**
-     * Retrieves eamArtifact instances from the database that are associated
-     * with the eamArtifactType and eamArtifactValues of the given eamArtifact.
+     * Retrieves correlation attribute instances from the central repository
+     * that match a given SQL query and correlation attribute type.
     *
-     * @param aType  The type of the artifact
-     * @param values The list of correlation values to get
-     *               CorrelationAttributeInstances for
+     * @param sql      The SQL query.
+     * @param attrType The correlation attribute type.
     *
-     * @return List of artifact instances for a given type with the specified
-     *         values
+     * @return The correlation attribute instances.
     *
-     * @throws CorrelationAttributeNormalizationException
-     * @throws CentralRepoException
+     * @throws CorrelationAttributeNormalizationException The exception is
+     *                                                    thrown if the supplied
+     *                                                    correlation attribute
+     *                                                    value cannot be
+     *                                                    normalized.
+     * @throws CentralRepoException                       The exception is
+     *                                                    thrown if there is an
+     *                                                    error querying the
+     *                                                    central repository.
     */
-    private List<CorrelationAttributeInstance> getArtifactInstances(String sql, CorrelationAttributeInstance.Type aType) throws CorrelationAttributeNormalizationException, CentralRepoException {
+    private List<CorrelationAttributeInstance> getCorrAttrInstances(String sql, CorrelationAttributeInstance.Type attrType) throws CorrelationAttributeNormalizationException, CentralRepoException {
+        List<CorrelationAttributeInstance> corrAttrs = new ArrayList<>();
         Connection conn = connect();
-        List<CorrelationAttributeInstance> artifactInstances = new ArrayList<>();
-        CorrelationAttributeInstance artifactInstance;
         PreparedStatement preparedStatement = null;
         ResultSet resultSet = null;
         try {
             preparedStatement = conn.prepareStatement(sql);
             resultSet = preparedStatement.executeQuery();
             while (resultSet.next()) {
-                artifactInstance = getEamArtifactInstanceFromResultSet(resultSet, aType);
-                artifactInstances.add(artifactInstance);
+                CorrelationAttributeInstance corrAttr = getCorrAttrFromResultSet(resultSet, attrType);
+                corrAttrs.add(corrAttr);
             }
         } catch (SQLException ex) {
-            throw new CentralRepoException("Error getting artifact instances by artifactType and artifactValue.", ex); // NON-NLS
+            throw new CentralRepoException(String.format("Error getting correlation attributes using query %s", sql), ex); // NON-NLS
         } finally {
             CentralRepoDbUtil.closeResultSet(resultSet);
             CentralRepoDbUtil.closeStatement(preparedStatement);
             CentralRepoDbUtil.closeConnection(conn);
         }
-        return artifactInstances;
+        return corrAttrs;
     }

     /**
@@ -1509,7 +1513,7 @@ abstract class RdbmsCentralRepo implements CentralRepository {
         PreparedStatement preparedStatement = null;
         String tableName = CentralRepoDbUtil.correlationTypeToInstanceTableName(instance.getCorrelationType());
         ResultSet resultSet = null;
-
+
         try {
             if (correlationCaseId > 0 && sourceObjID != null && correlationDataSourceId > 0) {
                 //The CorrelationCase is in the Central repository.
@@ -3643,7 +3647,7 @@ abstract class RdbmsCentralRepo implements CentralRepository {
     *
     * @throws SQLException when an expected column name is not in the resultSet
     */
-    private CorrelationAttributeInstance getEamArtifactInstanceFromResultSet(ResultSet resultSet, CorrelationAttributeInstance.Type aType) throws SQLException, CentralRepoException, CorrelationAttributeNormalizationException {
+    private CorrelationAttributeInstance getCorrAttrFromResultSet(ResultSet resultSet, CorrelationAttributeInstance.Type aType) throws SQLException, CentralRepoException, CorrelationAttributeNormalizationException {
         if (null == resultSet) {
             return null;
         }
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/Bundle.properties-MERGED
index d71782c0ee..909275d224 100755
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/Bundle.properties-MERGED
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/Bundle.properties-MERGED
@@ -1,18 +1,8 @@
 caseeventlistener.evidencetag=Evidence
-CaseEventsListener.module.name=Central Repository
-CaseEventsListener.prevCaseComment.text=Users seen in previous cases
-CaseEventsListener.prevExists.text=Previously Seen Users (Central Repository)
 CentralRepositoryNotificationDialog.bulletHeader=This data is used to:
 CentralRepositoryNotificationDialog.bulletOne=Ignore common items (files, domains, and accounts)
 CentralRepositoryNotificationDialog.bulletThree=Create personas that group accounts
 CentralRepositoryNotificationDialog.bulletTwo=Identify where an item was previously seen
 CentralRepositoryNotificationDialog.finalRemarks=To limit what is stored, use the Central Repository options panel.
 CentralRepositoryNotificationDialog.header=Autopsy stores data about each case in its Central Repository.
-IngestEventsListener.ingestmodule.name=Central Repository
-IngestEventsListener.prevCaseComment.text=Previous Case:
-# {0} - typeName
-# {1} - count
-IngestEventsListener.prevCount.text=Number of previous {0}: {1}
-IngestEventsListener.prevExists.text=Previously Seen Devices (Central Repository)
-IngestEventsListener.prevTaggedSet.text=Previously Tagged As Notable (Central Repository)
 Installer.centralRepoUpgradeFailed.title=Central repository disabled
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java
index b1cac61e4c..6610fcbd86 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java
@@ -22,18 +22,13 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder;
 import java.beans.PropertyChangeEvent;
 import java.beans.PropertyChangeListener;
 import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
 import java.util.EnumSet;
 import java.util.List;
-import java.util.Optional;
 import java.util.Set;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.logging.Level;
-import java.util.stream.Collectors;
 import org.apache.commons.lang.StringUtils;
-import org.openide.util.NbBundle;
 import org.openide.util.NbBundle.Messages;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
@@ -43,7 +38,6 @@ import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent;
 import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent;
 import org.sleuthkit.autopsy.casemodule.events.DataSourceAddedEvent;
 import org.sleuthkit.autopsy.casemodule.events.DataSourceNameChangedEvent;
-import org.sleuthkit.autopsy.casemodule.events.OsAcctInstancesAddedEvent;
 import org.sleuthkit.autopsy.casemodule.services.TagsManager;
 import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
@@ -61,34 +55,20 @@ import org.sleuthkit.datamodel.TagName;
 import org.sleuthkit.datamodel.TskCoreException;
 import org.sleuthkit.datamodel.TskData;
 import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository;
-import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException;
 import org.sleuthkit.datamodel.Tag;
 import org.sleuthkit.autopsy.events.AutopsyEvent;
-import org.sleuthkit.autopsy.ingest.IngestManager;
 import org.sleuthkit.datamodel.AnalysisResult;
-import org.sleuthkit.datamodel.Blackboard;
-import org.sleuthkit.datamodel.BlackboardAttribute;
-import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_OTHER_CASES;
-import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
-import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_TYPE;
-import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_VALUE;
 import org.sleuthkit.datamodel.DataArtifact;
-import org.sleuthkit.datamodel.OsAccount;
-import org.sleuthkit.datamodel.OsAccountInstance;
-import org.sleuthkit.datamodel.Score;
-import org.sleuthkit.datamodel.SleuthkitCase;

 /**
- * Listen for case events and update entries in the Central Repository database
- * accordingly
+ * An Autopsy events listener for case events relevant to the central
+ * repository.
  */
 @Messages({"caseeventlistener.evidencetag=Evidence"})
 public final class CaseEventListener implements PropertyChangeListener {

     private static final Logger LOGGER = Logger.getLogger(CaseEventListener.class.getName());
-    private final ExecutorService jobProcessingExecutor;
-    private static final String CASE_EVENT_THREAD_NAME = "Case-Event-Listener-%d";
-
+    private static final String CASE_EVENT_THREAD_NAME = "CR-Case-Event-Listener-%d";
     private static final Set<Case.Events> CASE_EVENTS_OF_INTEREST = EnumSet.of(
             Case.Events.CONTENT_TAG_ADDED, Case.Events.CONTENT_TAG_DELETED,
             Case.Events.BLACKBOARD_ARTIFACT_TAG_DELETED, Case.Events.BLACKBOARD_ARTIFACT_TAG_ADDED,
@@ -96,14 +76,29 @@ public final class CaseEventListener implements PropertyChangeListener {
             Case.Events.DATA_SOURCE_ADDED,
             Case.Events.TAG_DEFINITION_CHANGED,
             Case.Events.CURRENT_CASE,
-            Case.Events.DATA_SOURCE_NAME_CHANGED,
-            Case.Events.OS_ACCT_INSTANCES_ADDED);
+            Case.Events.DATA_SOURCE_NAME_CHANGED);
+    private final ExecutorService jobProcessingExecutor;

+    /**
+     * Constructs an Autopsy events listener for case events relevant to the
+     * central repository.
+     */
     public CaseEventListener() {
         jobProcessingExecutor = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setNameFormat(CASE_EVENT_THREAD_NAME).build());
     }

+    /**
+     * Starts up the listener.
+     */
+    public void startUp() {
+        Case.addEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, this);
+    }
+
+    /**
+     * Shuts down the listener.
+     */
     public void shutdown() {
+        Case.removeEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, this);
         ThreadUtils.shutDownTaskExecutor(jobProcessingExecutor);
     }

@@ -113,92 +108,73 @@ public final class CaseEventListener implements PropertyChangeListener {
             return;
         }

-        CentralRepository dbManager;
-        try {
-            dbManager = CentralRepository.getInstance();
-        } catch (CentralRepoException ex) {
-            LOGGER.log(Level.SEVERE, "Failed to get instance of db manager.", ex);
+        if (!CentralRepository.isEnabled()) {
             return;
         }

-        // If any changes are made to which event types are handled the change
-        // must also be made to CASE_EVENTS_OF_INTEREST.
+        CentralRepository centralRepo;
+        try {
+            centralRepo = CentralRepository.getInstance();
+        } catch (CentralRepoException ex) {
+            LOGGER.log(Level.SEVERE, "Failed to access central repository", ex);
+            return;
+        }
+
+        /*
+         * IMPORTANT: If any changes are made to which event types are handled,
+         * the change must also be made to the contents of the
+         * CASE_EVENTS_OF_INTEREST set.
+         */
         switch (Case.Events.valueOf(evt.getPropertyName())) {
             case CONTENT_TAG_ADDED:
-            case CONTENT_TAG_DELETED: {
-                jobProcessingExecutor.submit(new ContentTagTask(dbManager, evt));
-            }
-            break;
-
+            case CONTENT_TAG_DELETED:
+                jobProcessingExecutor.submit(new ContentTagTask(centralRepo, evt));
+                break;
             case BLACKBOARD_ARTIFACT_TAG_DELETED:
-            case BLACKBOARD_ARTIFACT_TAG_ADDED: {
-                jobProcessingExecutor.submit(new BlackboardTagTask(dbManager, evt));
-            }
-            break;
-
-            case DATA_SOURCE_ADDED: {
-                jobProcessingExecutor.submit(new DataSourceAddedTask(dbManager, evt));
-            }
-            break;
-            case TAG_DEFINITION_CHANGED: {
+            case BLACKBOARD_ARTIFACT_TAG_ADDED:
+                jobProcessingExecutor.submit(new ArtifactTagTask(centralRepo, evt));
+                break;
+            case DATA_SOURCE_ADDED:
+                jobProcessingExecutor.submit(new DataSourceAddedTask(centralRepo, evt));
+                break;
+            case TAG_DEFINITION_CHANGED:
                 jobProcessingExecutor.submit(new TagDefinitionChangeTask(evt));
-            }
-            break;
-            case CURRENT_CASE: {
-                jobProcessingExecutor.submit(new CurrentCaseTask(dbManager, evt));
-            }
-            break;
-            case DATA_SOURCE_NAME_CHANGED: {
-                jobProcessingExecutor.submit(new DataSourceNameChangedTask(dbManager, evt));
-            }
-            break;
-            case OS_ACCT_INSTANCES_ADDED: {
-                if (((AutopsyEvent) evt).getSourceType() == AutopsyEvent.SourceType.LOCAL) {
-                    jobProcessingExecutor.submit(new OsAccountInstancesAddedTask(dbManager, evt));
-                }
-            }
-            break;
+                break;
+            case CURRENT_CASE:
+                jobProcessingExecutor.submit(new CurrentCaseTask(centralRepo, evt));
+                break;
+            case DATA_SOURCE_NAME_CHANGED:
+                jobProcessingExecutor.submit(new DataSourceNameChangedTask(centralRepo, evt));
+                break;
+            default:
+                break;
         }
     }

-    /*
-     * Add all of our Case Event Listeners to the case.
+    /**
+     * Determines whether or not a tag has notable status.
+     *
+     * @param tag The tag.
+     *
+     * @return True or false.
     */
-    public void installListeners() {
-        Case.addEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, this);
-    }
-
-    /*
-     * Remove all of our Case Event Listeners from the case.
-     */
-    public void uninstallListeners() {
-        Case.removeEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, this);
+    private static boolean isNotableTag(Tag tag) {
+        return (tag != null && isNotableTagDefinition(tag.getName()));
     }

     /**
-     * Returns true if the tag has a notable status.
+     * Determines whether or not a tag definition calls for notable status.
     *
-     * @param t The tag to use in determination.
+     * @param tagDef The tag definition.
     *
-     * @return Whether or not it is a notable tag.
+     * @return True or false.
     */
-    private static boolean isNotableTag(Tag t) {
-        return (t != null && isNotableTagName(t.getName()));
+    private static boolean isNotableTagDefinition(TagName tagDef) {
+        return (tagDef != null && TagsManager.getNotableTagDisplayNames().contains(tagDef.getDisplayName()));
     }

     /**
-     * Returns true if the tag name has a notable status.
-     *
-     * @param t The tag name to use in determination.
-     *
-     * @return Whether or not it is a notable tag name.
-     */
-    private static boolean isNotableTagName(TagName t) {
-        return (t != null && TagsManager.getNotableTagDisplayNames().contains(t.getDisplayName()));
-    }
-
-    /**
-     * Searches a list of tags for a tag with a notable status.
+     * Searches a list of tags for a tag with notable status.
     *
     * @param tags The tags to search.
     *
@@ -208,7 +184,6 @@ public final class CaseEventListener implements PropertyChangeListener {
         if (tags == null) {
             return false;
         }
-
         return tags.stream()
                 .filter(CaseEventListener::isNotableTag)
                 .findFirst()
                 .isPresent();
     }

     /**
-     * Sets the known status of a blackboard artifact in the central repository.
+     * Sets the notable (known) status of a central repository correlation
+     * attribute corresponding to an artifact.
     *
-     * @param dbManager   The central repo database.
-     * @param bbArtifact  The blackboard artifact to set known status.
-     * @param knownStatus The new known status.
+     * @param centralRepo   The central repository.
+     * @param artifact      The artifact.
+     * @param notableStatus The new notable status.
     */
-    private static void setArtifactKnownStatus(CentralRepository dbManager, BlackboardArtifact bbArtifact, TskData.FileKnown knownStatus) {
-        List<CorrelationAttributeInstance> convertedArtifacts = new ArrayList<>();
-        if (bbArtifact instanceof DataArtifact) {
-            convertedArtifacts.addAll(CorrelationAttributeUtil.makeCorrAttrsForSearch((DataArtifact) bbArtifact));
-        } else if (bbArtifact instanceof AnalysisResult) {
-            convertedArtifacts.addAll(CorrelationAttributeUtil.makeCorrAttrsForSearch((AnalysisResult) bbArtifact));
+    private static void setArtifactKnownStatus(CentralRepository centralRepo, BlackboardArtifact artifact, TskData.FileKnown notableStatus) {
+        List<CorrelationAttributeInstance> corrAttrInstances = new ArrayList<>();
+        if (artifact instanceof DataArtifact) {
+            corrAttrInstances.addAll(CorrelationAttributeUtil.makeCorrAttrsForSearch((DataArtifact) artifact));
+        } else if (artifact instanceof AnalysisResult) {
+            corrAttrInstances.addAll(CorrelationAttributeUtil.makeCorrAttrsForSearch((AnalysisResult) artifact));
         }

-        for (CorrelationAttributeInstance eamArtifact : convertedArtifacts) {
-            try {
-                dbManager.setAttributeInstanceKnownStatus(eamArtifact, knownStatus);
+        for (CorrelationAttributeInstance corrAttrInstance : corrAttrInstances) {
+            try {
+                centralRepo.setAttributeInstanceKnownStatus(corrAttrInstance, notableStatus);
             } catch (CentralRepoException ex) {
-                LOGGER.log(Level.SEVERE, "Error connecting to Central Repository database while setting artifact known status.", ex); //NON-NLS
+                LOGGER.log(Level.SEVERE, String.format("Error setting known status of correlation attribute instance '%s'", corrAttrInstance), ex); //NON-NLS
             }
         }
     }
@@ -359,12 +335,12 @@ public final class CaseEventListener implements PropertyChangeListener {
         }
     }

-    private final class BlackboardTagTask implements Runnable {
+    private final class ArtifactTagTask implements Runnable {

         private final CentralRepository dbManager;
         private final PropertyChangeEvent event;

-        private BlackboardTagTask(CentralRepository db, PropertyChangeEvent evt) {
+        private ArtifactTagTask(CentralRepository db, PropertyChangeEvent evt) {
             dbManager = db;
             event = evt;
         }
@@ -644,7 +620,6 @@ public final class CaseEventListener implements PropertyChangeListener {
             */
             if ((null == event.getOldValue()) && (event.getNewValue() instanceof Case)) {
                 Case curCase = (Case) event.getNewValue();
-                IngestEventsListener.resetCeModuleInstanceCount();

                 if (!CentralRepository.isEnabled()) {
                     return;
@@ -663,126 +638,6 @@ public final class CaseEventListener implements PropertyChangeListener {
             }
         } // CURRENT_CASE
     }

-    @NbBundle.Messages({"CaseEventsListener.module.name=Central Repository",
-        "CaseEventsListener.prevCaseComment.text=Users seen in previous cases",
-        "CaseEventsListener.prevExists.text=Previously Seen Users (Central Repository)"})
-    /**
-     * Add OsAccount Instance to CR and find interesting items based on the
-     * OsAccount
-     */
-    private final class OsAccountInstancesAddedTask implements Runnable {
-
-        private final CentralRepository dbManager;
-        private final PropertyChangeEvent event;
-        private final String MODULE_NAME = Bundle.CaseEventsListener_module_name();
-
-        private OsAccountInstancesAddedTask(CentralRepository db, PropertyChangeEvent evt) {
-            dbManager = db;
-            event = evt;
-        }
-
-        @Override
-        public void run() {
-            //Nothing to do here if the central repo is not enabled or if ingest is running but is set to not save data/make artifacts
-            if (!CentralRepository.isEnabled()
-                    || (IngestManager.getInstance().isIngestRunning() && !(IngestEventsListener.isFlagSeenDevices() || IngestEventsListener.shouldCreateCrProperties()))) {
-                return;
-            }
-
-            final OsAcctInstancesAddedEvent osAcctInstancesAddedEvent = (OsAcctInstancesAddedEvent) event;
-            List<OsAccountInstance> addedOsAccountNew = osAcctInstancesAddedEvent.getOsAccountInstances();
-            for (OsAccountInstance osAccountInstance : addedOsAccountNew) {
-                try {
-                    OsAccount osAccount = osAccountInstance.getOsAccount();
-                    List<CorrelationAttributeInstance> correlationAttributeInstances = CorrelationAttributeUtil.makeCorrAttrsToSave(osAccountInstance);
-                    if (correlationAttributeInstances.isEmpty()) {
-                        return;
-                    }
-
-                    Optional<String> accountAddr = osAccount.getAddr();
-                    try {
-                        // Save to the database if requested
-                        if (IngestEventsListener.shouldCreateCrProperties()) {
-                            for (CorrelationAttributeInstance correlationAttributeInstance : correlationAttributeInstances) {
-                                dbManager.addArtifactInstance(correlationAttributeInstance);
-                            }
-                        }
-
-                        // Look up and create artifacts for previously seen accounts if requested
-                        if (IngestEventsListener.isFlagSeenDevices()) {
-
-                            CorrelationAttributeInstance instanceWithTypeValue = null;
-                            for (CorrelationAttributeInstance instance : correlationAttributeInstances) {
-                                if (instance.getCorrelationType().getId() == CorrelationAttributeInstance.OSACCOUNT_TYPE_ID) {
-                                    instanceWithTypeValue = instance;
-                                    break;
-                                }
-                            }
-
-                            if (instanceWithTypeValue != null) {
-                                List<CorrelationAttributeInstance> previousOccurences = dbManager.getArtifactInstancesByTypeValue(instanceWithTypeValue.getCorrelationType(), instanceWithTypeValue.getCorrelationValue());
-
-                                for (CorrelationAttributeInstance instance : previousOccurences) {
-                                    //we can get the first instance here since the case for all attributes will be the same
-                                    if (!instance.getCorrelationCase().getCaseUUID().equals(instanceWithTypeValue.getCorrelationCase().getCaseUUID())) {
-                                        SleuthkitCase tskCase = osAccount.getSleuthkitCase();
-                                        Blackboard blackboard = tskCase.getBlackboard();
-
-                                        List<String> caseDisplayNames = dbManager.getListCasesHavingArtifactInstances(instanceWithTypeValue.getCorrelationType(), instanceWithTypeValue.getCorrelationValue());
-
-                                        // calculate score
-                                        Score score;
-                                        int numCases = caseDisplayNames.size();
-                                        if (numCases <= IngestEventsListener.MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE) {
-                                            score = Score.SCORE_LIKELY_NOTABLE;
-                                        } else if (numCases > IngestEventsListener.MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE && numCases <= IngestEventsListener.MAX_NUM_PREVIOUS_CASES_FOR_PREV_SEEN_ARTIFACT_CREATION) {
-                                            score = Score.SCORE_NONE;
-                                        } else {
-                                            // don't make an Analysis Result, the artifact is too common.
-                                            continue;
-                                        }
-
-                                        String prevCases = caseDisplayNames.stream().distinct().collect(Collectors.joining(","));
-                                        String justification = "Previously seen in cases " + prevCases;
-                                        Collection<BlackboardAttribute> attributesForNewArtifact = Arrays.asList(
-                                                new BlackboardAttribute(
-                                                        TSK_SET_NAME, MODULE_NAME,
-                                                        Bundle.CaseEventsListener_prevExists_text()),
-                                                new BlackboardAttribute(
-                                                        TSK_CORRELATION_TYPE, MODULE_NAME,
-                                                        instance.getCorrelationType().getDisplayName()),
-                                                new BlackboardAttribute(
-                                                        TSK_CORRELATION_VALUE, MODULE_NAME,
-                                                        instanceWithTypeValue.getCorrelationValue()),
-                                                new BlackboardAttribute(
-                                                        TSK_OTHER_CASES, MODULE_NAME,
-                                                        prevCases));
-                                        BlackboardArtifact newAnalysisResult = osAccount.newAnalysisResult(
-                                                BlackboardArtifact.Type.TSK_PREVIOUSLY_SEEN, score,
-                                                null, Bundle.CaseEventsListener_prevExists_text(), justification, attributesForNewArtifact, osAccountInstance.getDataSource().getId()).getAnalysisResult();
-                                        try {
-                                            blackboard.postArtifact(newAnalysisResult, MODULE_NAME, null);
-                                            break;
-                                        } catch (Blackboard.BlackboardException ex) {
-                                            LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + newAnalysisResult.getArtifactID(), ex); //NON-NLS
-                                        }
-                                    }
-                                }
-                            }
-                        }
-
-                    } catch (CorrelationAttributeNormalizationException ex) {
-                        LOGGER.log(Level.SEVERE, "Exception with Correlation Attribute Normalization.", ex); //NON-NLS
-                    } catch (CentralRepoException ex) {
-                        LOGGER.log(Level.SEVERE, String.format("Cannot get central repository for OsAccount: %s.", accountAddr.get()), ex); //NON-NLS
-                    }
-                } catch (TskCoreException ex) {
-                    LOGGER.log(Level.SEVERE, "Cannot get central repository for OsAccount: " + "OsAccount", ex);
-                }
-            }
-        }
-    }
-
     private final class DataSourceNameChangedTask implements Runnable {

         private final CentralRepository dbManager;
@@ -816,6 +671,7 @@ public final class CaseEventListener implements PropertyChangeListener {
                 LOGGER.log(Level.SEVERE, "No open case", ex);
             }
         }
-    } // DATA_SOURCE_NAME_CHANGED
+    }

 }
+
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/IngestEventsListener.java b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/IngestEventsListener.java
deleted file mode 100644
index 6af51f69af..0000000000
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/IngestEventsListener.java
+++ /dev/null
@@ -1,673 +0,0 @@
-/*
- * Central Repository
- *
- * Copyright 2017-2021 Basis Technology Corp.
- * Contact: carrier <at> sleuthkit <dot> org
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.sleuthkit.autopsy.centralrepository.eventlisteners;
-
-import com.google.common.util.concurrent.ThreadFactoryBuilder;
-import java.beans.PropertyChangeEvent;
-import java.beans.PropertyChangeListener;
-import static java.lang.Boolean.FALSE;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.EnumSet;
-import java.util.Iterator;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.logging.Level;
-import java.util.stream.Collectors;
-import org.apache.commons.lang3.StringUtils;
-import org.openide.util.NbBundle;
-import org.sleuthkit.autopsy.casemodule.Case;
-import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
-import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
-import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException;
-import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeUtil;
-import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException;
-import org.sleuthkit.autopsy.coreutils.Logger;
-import org.sleuthkit.autopsy.ingest.IngestManager;
-import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
-import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase;
-import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource;
-import org.sleuthkit.datamodel.Blackboard;
-import org.sleuthkit.datamodel.BlackboardArtifact;
-import org.sleuthkit.datamodel.BlackboardAttribute;
-import org.sleuthkit.autopsy.coreutils.ThreadUtils;
-import static org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent.DATA_ADDED;
-import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
-import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_TYPE;
-import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_VALUE;
-import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_OTHER_CASES;
-import org.sleuthkit.autopsy.ingest.events.DataSourceAnalysisEvent;
-import org.sleuthkit.datamodel.Content;
-import org.sleuthkit.datamodel.Image;
-import org.sleuthkit.datamodel.SleuthkitCase;
-import org.sleuthkit.datamodel.TskCoreException;
-import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository;
-import org.sleuthkit.datamodel.DataArtifact;
-import org.sleuthkit.datamodel.Score;
-import org.sleuthkit.datamodel.TskData;
-
-/**
- * Listen for ingest events and update entries in the Central Repository
- * database accordingly
- */
-@NbBundle.Messages({"IngestEventsListener.ingestmodule.name=Central Repository"})
-public class IngestEventsListener {
-
-    private static final Logger LOGGER = Logger.getLogger(CorrelationAttributeInstance.class.getName());
-    private static final Set<IngestManager.IngestJobEvent> INGEST_JOB_EVENTS_OF_INTEREST = EnumSet.of(IngestManager.IngestJobEvent.DATA_SOURCE_ANALYSIS_COMPLETED);
-    private static final Set<IngestManager.IngestModuleEvent> INGEST_MODULE_EVENTS_OF_INTEREST = EnumSet.of(DATA_ADDED);
-    private static final String MODULE_NAME = Bundle.IngestEventsListener_ingestmodule_name();
-    private static int correlationModuleInstanceCount;
-    private static boolean flagNotableItems;
-    private static boolean flagSeenDevices;
-    private static boolean createCrProperties;
-    private static boolean flagUniqueArtifacts;
-    private static final String INGEST_EVENT_THREAD_NAME = "Ingest-Event-Listener-%d";
"Ingest-Event-Listener-%d"; - private final ExecutorService jobProcessingExecutor; - private final PropertyChangeListener pcl1 = new IngestModuleEventListener(); - private final PropertyChangeListener pcl2 = new IngestJobEventListener(); - final Collection recentlyAddedCeArtifacts = new LinkedHashSet<>(); - - static final int MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE = 10; - static final int MAX_NUM_PREVIOUS_CASES_FOR_PREV_SEEN_ARTIFACT_CREATION = 20; - - public IngestEventsListener() { - jobProcessingExecutor = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setNameFormat(INGEST_EVENT_THREAD_NAME).build()); - } - - public void shutdown() { - ThreadUtils.shutDownTaskExecutor(jobProcessingExecutor); - } - - /* - * Add all of our Ingest Event Listeners to the IngestManager Instance. - */ - public void installListeners() { - IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS_OF_INTEREST, pcl1); - IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, pcl2); - } - - /* - * Remove all of our Ingest Event Listeners from the IngestManager Instance. - */ - public void uninstallListeners() { - IngestManager.getInstance().removeIngestModuleEventListener(pcl1); - IngestManager.getInstance().removeIngestJobEventListener(pcl2); - } - - /** - * Increase the number of IngestEventsListeners adding contents to the - * Central Repository. - */ - public synchronized static void incrementCorrelationEngineModuleCount() { - correlationModuleInstanceCount++; //Should be called once in the Central Repository module's startup method. - } - - /** - * Decrease the number of IngestEventsListeners adding contents to the - * Central Repository. - */ - public synchronized static void decrementCorrelationEngineModuleCount() { - if (getCeModuleInstanceCount() > 0) { //prevent it ingestJobCounter from going negative - correlationModuleInstanceCount--; //Should be called once in the Central Repository module's shutdown method. - } - } - - /** - * Reset the counter which keeps track of if the Central Repository Module - * is being run during injest to 0. - */ - synchronized static void resetCeModuleInstanceCount() { - correlationModuleInstanceCount = 0; //called when a case is opened in case for some reason counter was not reset - } - - /** - * Whether or not the Central Repository Module is enabled for any of the - * currently running ingest jobs. - * - * @return boolean True for Central Repository enabled, False for disabled - */ - public synchronized static int getCeModuleInstanceCount() { - return correlationModuleInstanceCount; - } - - /** - * Are notable items being flagged? - * - * @return True if flagging notable items; otherwise false. - */ - public synchronized static boolean isFlagNotableItems() { - return flagNotableItems; - } - - /** - * Are previously seen devices being flagged? - * - * @return True if flagging seen devices; otherwise false. - */ - public synchronized static boolean isFlagSeenDevices() { - return flagSeenDevices; - } - - /** - * Are correlation properties being created - * - * @return True if creating correlation properties; otherwise false. - */ - public synchronized static boolean shouldCreateCrProperties() { - return createCrProperties; - } - - /** - * Configure the listener to flag notable items or not. - * - * @param value True to flag notable items; otherwise false. 
-     */
-    public synchronized static void setFlagNotableItems(boolean value) {
-        flagNotableItems = value;
-    }
-
-    /**
-     * Configure the listener to flag previously seen devices or not.
-     *
-     * @param value True to flag seen devices; otherwise false.
-     */
-    public synchronized static void setFlagSeenDevices(boolean value) {
-        flagSeenDevices = value;
-    }
-
-    /**
-     * Configure the listener to flag unique apps or not.
-     *
-     * @param value True to flag unique apps; otherwise false.
-     */
-    public synchronized static void setFlagUniqueArtifacts(boolean value) {
-        flagUniqueArtifacts = value;
-    }
-
-    /**
-     * Are unique apps being flagged?
-     *
-     * @return True if flagging unique apps; otherwise false.
-     */
-    public synchronized static boolean isFlagUniqueArtifacts() {
-        return flagUniqueArtifacts;
-    }
-
-    /**
-     * Configure the listener to create correlation properties
-     *
-     * @param value True to create properties; otherwise false.
-     */
-    public synchronized static void setCreateCrProperties(boolean value) {
-        createCrProperties = value;
-    }
-
-    /**
-     * Make a "previously seen" artifact based on a new artifact being
-     * previously seen.
-     *
-     * @param originalArtifact Original artifact that we want to flag
-     * @param caseDisplayNames List of case names artifact was previously seen
-     *                         in
-     * @param aType            The correlation type.
-     * @param value            The correlation value.
-     */
-    @NbBundle.Messages({"IngestEventsListener.prevTaggedSet.text=Previously Tagged As Notable (Central Repository)",
-        "IngestEventsListener.prevCaseComment.text=Previous Case: "})
-    static private void makeAndPostPreviousNotableArtifact(BlackboardArtifact originalArtifact, List<String> caseDisplayNames,
-            CorrelationAttributeInstance.Type aType, String value) {
-        String prevCases = caseDisplayNames.stream().distinct().collect(Collectors.joining(","));
-        String justification = "Previously marked as notable in cases " + prevCases;
-        Collection<BlackboardAttribute> attributesForNewArtifact = Arrays.asList(new BlackboardAttribute(
-                TSK_SET_NAME, MODULE_NAME,
-                Bundle.IngestEventsListener_prevTaggedSet_text()),
-                new BlackboardAttribute(
-                        TSK_CORRELATION_TYPE, MODULE_NAME,
-                        aType.getDisplayName()),
-                new BlackboardAttribute(
-                        TSK_CORRELATION_VALUE, MODULE_NAME,
-                        value),
-                new BlackboardAttribute(
-                        TSK_OTHER_CASES, MODULE_NAME,
-                        prevCases));
-        makeAndPostArtifact(BlackboardArtifact.Type.TSK_PREVIOUSLY_NOTABLE, originalArtifact, attributesForNewArtifact, Bundle.IngestEventsListener_prevTaggedSet_text(),
-                Score.SCORE_NOTABLE, justification);
-    }
-
-    /**
-     * Create a "previously seen" hit for a device which was previously seen in
-     * the central repository. NOTE: Artifacts that are too common will be
-     * skipped.
-     *
-     * @param originalArtifact the artifact to create the "previously seen" item
-     *                         for
-     * @param caseDisplayNames the case names the artifact was previously seen
-     *                         in
-     * @param aType            The correlation type.
-     * @param value            The correlation value.
-     */
-    @NbBundle.Messages({"IngestEventsListener.prevExists.text=Previously Seen Devices (Central Repository)",
-        "# {0} - typeName",
-        "# {1} - count",
-        "IngestEventsListener.prevCount.text=Number of previous {0}: {1}"})
-    static private void makeAndPostPreviousSeenArtifact(BlackboardArtifact originalArtifact, List<String> caseDisplayNames,
-            CorrelationAttributeInstance.Type aType, String value) {
-
-        // calculate score
-        Score score;
-        int numCases = caseDisplayNames.size();
-        if (numCases <= MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE) {
-            score = Score.SCORE_LIKELY_NOTABLE;
-        } else if (numCases > MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE && numCases <= MAX_NUM_PREVIOUS_CASES_FOR_PREV_SEEN_ARTIFACT_CREATION) {
-            score = Score.SCORE_NONE;
-        } else {
-            // don't make an Analysis Result, the artifact is too common.
-            return;
-        }
-
-        String prevCases = caseDisplayNames.stream().distinct().collect(Collectors.joining(","));
-        String justification = "Previously seen in cases " + prevCases;
-        Collection<BlackboardAttribute> attributesForNewArtifact = Arrays.asList(new BlackboardAttribute(
-                TSK_SET_NAME, MODULE_NAME,
-                Bundle.IngestEventsListener_prevExists_text()),
-                new BlackboardAttribute(
-                        TSK_CORRELATION_TYPE, MODULE_NAME,
-                        aType.getDisplayName()),
-                new BlackboardAttribute(
-                        TSK_CORRELATION_VALUE, MODULE_NAME,
-                        value),
-                new BlackboardAttribute(
-                        TSK_OTHER_CASES, MODULE_NAME,
-                        prevCases));
-        makeAndPostArtifact(BlackboardArtifact.Type.TSK_PREVIOUSLY_SEEN, originalArtifact, attributesForNewArtifact, Bundle.IngestEventsListener_prevExists_text(),
-                score, justification);
-    }
-
-    /**
-     * Create a "previously unseen" hit for an application which was never seen
-     * in the central repository.
-     *
-     * @param originalArtifact the artifact to create the "previously unseen"
-     *                         item for
-     * @param aType            The correlation type.
-     * @param value            The correlation value.
-     */
-    static private void makeAndPostPreviouslyUnseenArtifact(BlackboardArtifact originalArtifact, CorrelationAttributeInstance.Type aType, String value) {
-        Collection<BlackboardAttribute> attributesForNewArtifact = Arrays.asList(
-                new BlackboardAttribute(
-                        TSK_CORRELATION_TYPE, MODULE_NAME,
-                        aType.getDisplayName()),
-                new BlackboardAttribute(
-                        TSK_CORRELATION_VALUE, MODULE_NAME,
-                        value));
-        makeAndPostArtifact(BlackboardArtifact.Type.TSK_PREVIOUSLY_UNSEEN, originalArtifact, attributesForNewArtifact, "",
-                Score.SCORE_LIKELY_NOTABLE, "This application has not been previously seen before");
-    }
-
-    /**
-     * Make an artifact to flag the passed in artifact.
-     *
-     * @param newArtifactType          Type of artifact to create.
-     * @param originalArtifact         Artifact in current case we want to flag
-     * @param attributesForNewArtifact Attributes to assign to the new artifact
-     * @param configuration            The configuration to be specified for the
-     *                                 new artifact hit
-     * @param score                    sleuthkit.datamodel.Score to be assigned
-     *                                 to this artifact
-     * @param justification            Justification string
-     */
-    private static void makeAndPostArtifact(BlackboardArtifact.Type newArtifactType, BlackboardArtifact originalArtifact, Collection<BlackboardAttribute> attributesForNewArtifact, String configuration,
-            Score score, String justification) {
-        try {
-            SleuthkitCase tskCase = originalArtifact.getSleuthkitCase();
-            Blackboard blackboard = tskCase.getBlackboard();
-            // Create artifact if it doesn't already exist.
- BlackboardArtifact.ARTIFACT_TYPE type = BlackboardArtifact.ARTIFACT_TYPE.fromID(newArtifactType.getTypeID()); - if (!blackboard.artifactExists(originalArtifact, type, attributesForNewArtifact)) { - BlackboardArtifact newArtifact = originalArtifact.newAnalysisResult( - newArtifactType, score, - null, configuration, justification, attributesForNewArtifact) - .getAnalysisResult(); - - try { - blackboard.postArtifact(newArtifact, MODULE_NAME, null); - } catch (Blackboard.BlackboardException ex) { - LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + newArtifact.getArtifactID(), ex); //NON-NLS - } - } - } catch (TskCoreException ex) { - LOGGER.log(Level.SEVERE, "Failed to create BlackboardArtifact.", ex); // NON-NLS - } catch (IllegalStateException ex) { - LOGGER.log(Level.SEVERE, "Failed to create BlackboardAttribute.", ex); // NON-NLS - } - } - - private class IngestModuleEventListener implements PropertyChangeListener { - - @Override - public void propertyChange(PropertyChangeEvent evt) { - //if ingest is running we want there to check if there is a Central Repository module running - //sometimes artifacts are generated by DSPs or other sources while ingest is not running - //in these cases we still want to create correlation attributesForNewArtifact for those artifacts when appropriate - if (!IngestManager.getInstance().isIngestRunning() || getCeModuleInstanceCount() > 0) { - CentralRepository dbManager; - try { - dbManager = CentralRepository.getInstance(); - } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, "Failed to connect to Central Repository database.", ex); - return; - } - switch (IngestManager.IngestModuleEvent.valueOf(evt.getPropertyName())) { - case DATA_ADDED: { - //if ingest isn't running create the "previously seen" items, - // otherwise use the ingest module setting to determine if we create "previously seen" items - boolean flagNotable = !IngestManager.getInstance().isIngestRunning() || isFlagNotableItems(); - boolean flagPrevious = !IngestManager.getInstance().isIngestRunning() || isFlagSeenDevices(); - boolean createAttributes = !IngestManager.getInstance().isIngestRunning() || shouldCreateCrProperties(); - boolean flagUnique = !IngestManager.getInstance().isIngestRunning() || isFlagUniqueArtifacts(); - jobProcessingExecutor.submit(new DataAddedTask(dbManager, evt, flagNotable, flagPrevious, createAttributes, flagUnique)); - break; - } - default: - break; - } - } - } - } - - private class IngestJobEventListener implements PropertyChangeListener { - - @Override - public void propertyChange(PropertyChangeEvent evt) { - CentralRepository dbManager; - try { - dbManager = CentralRepository.getInstance(); - } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, "Failed to connect to Central Repository database.", ex); - return; - } - - switch (IngestManager.IngestJobEvent.valueOf(evt.getPropertyName())) { - case DATA_SOURCE_ANALYSIS_COMPLETED: { - jobProcessingExecutor.submit(new AnalysisCompleteTask(dbManager, evt)); - break; - } - default: - break; - } - } - - } - - private final class AnalysisCompleteTask implements Runnable { - - private final CentralRepository dbManager; - private final PropertyChangeEvent event; - - private AnalysisCompleteTask(CentralRepository db, PropertyChangeEvent evt) { - dbManager = db; - event = evt; - } - - @Override - public void run() { - // clear the tracker to reduce memory usage - if (getCeModuleInstanceCount() == 0) { - recentlyAddedCeArtifacts.clear(); - } - //else another instance of the Central 
Repository Module is still being run. - - /* - * Ensure the data source in the Central Repository has hash values - * that match those in the case database. - */ - if (!CentralRepository.isEnabled()) { - return; - } - Content dataSource; - String dataSourceName = ""; - long dataSourceObjectId = -1; - try { - dataSource = ((DataSourceAnalysisEvent) event).getDataSource(); - /* - * We only care about Images for the purpose of updating hash - * values. - */ - if (!(dataSource instanceof Image)) { - return; - } - - dataSourceName = dataSource.getName(); - dataSourceObjectId = dataSource.getId(); - - Case openCase = Case.getCurrentCaseThrows(); - - CorrelationCase correlationCase = dbManager.getCase(openCase); - if (null == correlationCase) { - correlationCase = dbManager.newCase(openCase); - } - - CorrelationDataSource correlationDataSource = dbManager.getDataSource(correlationCase, dataSource.getId()); - if (correlationDataSource == null) { - // Add the data source. - CorrelationDataSource.fromTSKDataSource(correlationCase, dataSource); - } else { - // Sync the data source hash values if necessary. - if (dataSource instanceof Image) { - Image image = (Image) dataSource; - - String imageMd5Hash = image.getMd5(); - if (imageMd5Hash == null) { - imageMd5Hash = ""; - } - String crMd5Hash = correlationDataSource.getMd5(); - if (StringUtils.equals(imageMd5Hash, crMd5Hash) == false) { - correlationDataSource.setMd5(imageMd5Hash); - } - - String imageSha1Hash = image.getSha1(); - if (imageSha1Hash == null) { - imageSha1Hash = ""; - } - String crSha1Hash = correlationDataSource.getSha1(); - if (StringUtils.equals(imageSha1Hash, crSha1Hash) == false) { - correlationDataSource.setSha1(imageSha1Hash); - } - - String imageSha256Hash = image.getSha256(); - if (imageSha256Hash == null) { - imageSha256Hash = ""; - } - String crSha256Hash = correlationDataSource.getSha256(); - if (StringUtils.equals(imageSha256Hash, crSha256Hash) == false) { - correlationDataSource.setSha256(imageSha256Hash); - } - } - } - } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, String.format( - "Unable to fetch data from the Central Repository for data source '%s' (obj_id=%d)", - dataSourceName, dataSourceObjectId), ex); - } catch (NoCurrentCaseException ex) { - LOGGER.log(Level.SEVERE, "No current case opened.", ex); - } catch (TskCoreException ex) { - LOGGER.log(Level.SEVERE, String.format( - "Unable to fetch data from the case database for data source '%s' (obj_id=%d)", - dataSourceName, dataSourceObjectId), ex); - } - } // DATA_SOURCE_ANALYSIS_COMPLETED - } - - private final class DataAddedTask implements Runnable { - - private final CentralRepository dbManager; - private final PropertyChangeEvent event; - private final boolean flagNotableItemsEnabled; - private final boolean flagPreviousItemsEnabled; - private final boolean createCorrelationAttributes; - private final boolean flagUniqueItemsEnabled; - - private DataAddedTask(CentralRepository db, PropertyChangeEvent evt, boolean flagNotableItemsEnabled, boolean flagPreviousItemsEnabled, boolean createCorrelationAttributes, boolean flagUnique) { - this.dbManager = db; - this.event = evt; - this.flagNotableItemsEnabled = flagNotableItemsEnabled; - this.flagPreviousItemsEnabled = flagPreviousItemsEnabled; - this.createCorrelationAttributes = createCorrelationAttributes; - this.flagUniqueItemsEnabled = flagUnique; - } - - @Override - public void run() { - if (!CentralRepository.isEnabled()) { - return; - } - final ModuleDataEvent mde = (ModuleDataEvent) 
event.getOldValue(); - Collection bbArtifacts = mde.getArtifacts(); - if (null == bbArtifacts) { //the ModuleDataEvents don't always have a collection of artifacts set - return; - } - List eamArtifacts = new ArrayList<>(); - - for (BlackboardArtifact bbArtifact : bbArtifacts) { - // makeCorrAttrToSave will filter out artifacts which should not be sources of CR data. - List convertedArtifacts = new ArrayList<>(); - if (bbArtifact instanceof DataArtifact) { - convertedArtifacts.addAll(CorrelationAttributeUtil.makeCorrAttrsToSave((DataArtifact) bbArtifact)); - } - for (CorrelationAttributeInstance eamArtifact : convertedArtifacts) { - try { - // Only do something with this artifact if it's unique within the job - if (recentlyAddedCeArtifacts.add(eamArtifact.toString())) { - - // Get a list of instances for a given value (hash, email, etc.) - List previousOccurrences = new ArrayList<>(); - // check if we are flagging things - if (flagNotableItemsEnabled || flagPreviousItemsEnabled || flagUniqueItemsEnabled) { - try { - previousOccurrences = dbManager.getArtifactInstancesByTypeValue(eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue()); - - // make sure the previous instances do not contain current case - for (Iterator iterator = previousOccurrences.iterator(); iterator.hasNext();) { - CorrelationAttributeInstance instance = iterator.next(); - if (instance.getCorrelationCase().getCaseUUID().equals(eamArtifact.getCorrelationCase().getCaseUUID())) { - // this is the current case - remove the instace from the previousOccurrences list - iterator.remove(); - } - } - } catch (CorrelationAttributeNormalizationException ex) { - LOGGER.log(Level.INFO, String.format("Unable to flag previously seen device: %s.", eamArtifact.toString()), ex); - } - } - - // Was it previously marked as bad? - // query db for artifact instances having this TYPE/VALUE and knownStatus = "Bad". - // if getKnownStatus() is "Unknown" and this artifact instance was marked bad in a previous case, - // create TSK_PREVIOUSLY_SEEN artifact on BB. 
- if (flagNotableItemsEnabled) { - List caseDisplayNames = getCaseDisplayNamesForNotable(previousOccurrences); - if (!caseDisplayNames.isEmpty()) { - makeAndPostPreviousNotableArtifact(bbArtifact, - caseDisplayNames, eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue()); - - // if we have marked this artifact as notable, then skip the analysis of whether it was previously seen - continue; - } - } - - // flag previously seen devices and communication accounts (emails, phones, etc) - if (flagPreviousItemsEnabled && !previousOccurrences.isEmpty() - && (eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.USBID_TYPE_ID - || eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.ICCID_TYPE_ID - || eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.IMEI_TYPE_ID - || eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.IMSI_TYPE_ID - || eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.MAC_TYPE_ID - || eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.EMAIL_TYPE_ID - || eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.PHONE_TYPE_ID)) { - - List caseDisplayNames = getCaseDisplayNames(previousOccurrences); - makeAndPostPreviousSeenArtifact(bbArtifact, caseDisplayNames, eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue()); - } - - // flag previously unseen apps and domains - if (flagUniqueItemsEnabled - && (eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.INSTALLED_PROGS_TYPE_ID - || eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.DOMAIN_TYPE_ID)) { - - if (previousOccurrences.isEmpty()) { - makeAndPostPreviouslyUnseenArtifact(bbArtifact, eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue()); - } - } - if (createCorrelationAttributes) { - eamArtifacts.add(eamArtifact); - } - } - } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, "Error counting notable artifacts.", ex); - } - } - } - if (FALSE == eamArtifacts.isEmpty()) { - for (CorrelationAttributeInstance eamArtifact : eamArtifacts) { - try { - dbManager.addArtifactInstance(eamArtifact); - } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, "Error adding artifact to database.", ex); //NON-NLS - } - } - } // DATA_ADDED - } - } - - /** - * Gets case display names for a list of CorrelationAttributeInstance. - * - * @param occurrences List of CorrelationAttributeInstance - * - * @return List of case display names - */ - private List getCaseDisplayNames(List occurrences) { - List caseNames = new ArrayList<>(); - for (CorrelationAttributeInstance occurrence : occurrences) { - caseNames.add(occurrence.getCorrelationCase().getDisplayName()); - } - return caseNames; - } - - /** - * Gets case display names for only occurrences marked as NOTABLE/BAD. 
-     *
-     * @param occurrences List of CorrelationAttributeInstance
-     *
-     * @return List of case display names of NOTABLE/BAD occurrences
-     */
-    private List<String> getCaseDisplayNamesForNotable(List<CorrelationAttributeInstance> occurrences) {
-        List<String> caseNames = new ArrayList<>();
-        for (CorrelationAttributeInstance occurrence : occurrences) {
-            if (occurrence.getKnownStatus() == TskData.FileKnown.BAD) {
-                caseNames.add(occurrence.getCorrelationCase().getDisplayName());
-            }
-        }
-        return caseNames;
-    }
-}
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/Bundle.properties-MERGED
index d15eb90cb2..cf2f2f4a12 100755
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/Bundle.properties-MERGED
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/Bundle.properties-MERGED
@@ -1,12 +1,26 @@
-CentralRepoIngestModel_name_header=<b>Name:</b><br>
-CentralRepoIngestModel_previous_case_header=<br><b>Previous Cases:</b><br>
-CentralRepoIngestModule.errorMessage.isNotEnabled=Central repository settings are not initialized, cannot run Central Repository ingest module.
-CentralRepoIngestModule.notfyBubble.title=Central Repository Not Initialized
-CentralRepoIngestModule.prevCaseComment.text=Previous Case: 
-CentralRepoIngestModule.prevTaggedSet.text=Previously Tagged As Notable (Central Repository)
-CentralRepoIngestModule_notable_message_header=<html>A file in this data source was previously seen and tagged as Notable.<br>
-# {0} - Name of file that is Notable
-CentralRepoIngestModule_postToBB_knownBadMsg=Notable: {0}
+CentralRepoIngestModule_artifact_type_inbox_msg_header=Artifact Type
+CentralRepoIngestModule_cannotGetCrCaseErrMsg=Case not present in the central repository
+CentralRepoIngestModule_cannotGetCrDataSourceErrMsg=Data source not present in the central repository
+CentralRepoIngestModule_crDatabaseTypeMismatch=Multi-user cases require a PostgreSQL central repository
+CentralRepoIngestModule_crInaccessibleErrMsg=Error accessing central repository
+CentralRepoIngestModule_crNotEnabledErrMsg=Central repository required, but not enabled
+CentralRepoIngestModule_filename_inbox_msg_header=File Name
+CentralRepoIngestModule_md5Hash_inbox_msg_header=MD5 Hash
+CentralRepoIngestModule_missingFileCorrAttrTypeErrMsg=Correlation attribute type for files not found in the central repository
+CentralRepoIngestModule_noCurrentCaseErrMsg=Error getting current case
+CentralRepoIngestModule_notable_attr_inbox_msg_header=Notable Attribute
+# {0} - Name of item that is Notable
+CentralRepoIngestModule_notable_inbox_msg_subject=Notable: {0}
+# {0} - list of cases
+CentralRepoIngestModule_notableJustification=Previously marked as notable in cases {0}
+CentralRepoIngestModule_notableSetName=Previously Tagged As Notable (Central Repository)
+CentralRepoIngestModule_prev_cases_inbox_msg_header=Previous Cases
+# {0} - list of cases
+CentralRepoIngestModule_prevSeenJustification=Previously seen in cases {0}
+CentralRepoIngestModule_prevSeenOsAcctConfig=Previously Seen Users (Central Repository)
+CentralRepoIngestModule_prevSeenOsAcctSetName=Users seen in previous cases
+CentralRepoIngestModule_prevSeenSetName=Previously Seen (Central Repository)
+CentralRepoIngestModule_prevUnseenJustification=Previously seen in zero cases
 CentralRepoIngestModuleFactory.ingestmodule.desc=Saves properties to the central repository for later correlation
 CentralRepoIngestModuleFactory.ingestmodule.name=Central Repository
 IngestSettingsPanel.ingestSettingsLabel.text=Ingest Settings
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java
index 0ed9f53518..0729cad326 100755
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java
@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2021 Basis Technology Corp.
+ * Copyright 2021-2021 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,37 +18,328 @@
 */
 package org.sleuthkit.autopsy.centralrepository.ingestmodule;
 
-import java.util.concurrent.atomic.AtomicLong;
+import java.util.HashSet;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.logging.Level;
+import org.apache.commons.lang3.StringUtils;
+import org.openide.util.NbBundle;
+import org.sleuthkit.autopsy.casemodule.Case;
+import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
+import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoDbManager;
+import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException;
+import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoPlatforms;
+import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository;
+import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
+import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeUtil;
+import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase;
+import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource;
+import static org.sleuthkit.autopsy.centralrepository.ingestmodule.CentralRepoIngestModuleUtils.getOccurrencesInOtherCases;
+import static org.sleuthkit.autopsy.centralrepository.ingestmodule.CentralRepoIngestModuleUtils.makePrevNotableAnalysisResult;
+import static org.sleuthkit.autopsy.centralrepository.ingestmodule.CentralRepoIngestModuleUtils.makePrevSeenAnalysisResult;
+import static org.sleuthkit.autopsy.centralrepository.ingestmodule.CentralRepoIngestModuleUtils.makePrevUnseenAnalysisResult;
+import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.autopsy.ingest.DataArtifactIngestModule;
-import org.sleuthkit.autopsy.ingest.IngestMessage;
-import org.sleuthkit.autopsy.ingest.IngestServices;
+import org.sleuthkit.autopsy.ingest.IngestJobContext;
 import org.sleuthkit.datamodel.DataArtifact;
+import org.sleuthkit.datamodel.Image;
+import org.sleuthkit.datamodel.OsAccount;
+import org.sleuthkit.datamodel.OsAccountManager;
+import org.sleuthkit.datamodel.TskCoreException;
+import org.sleuthkit.datamodel.TskData;
 
 /**
- * RJCTODO
- *
- * NOTE TO REVIEWER:
- *
- * This is a placeholder data artifact ingest module that counts the number of
- * data artifacts it processes and posts the final count to the ingest inbox.
- * The guts of the module will be supplied by a later PR.
+ * A data artifact ingest module that adds correlation attributes for data
+ * artifacts and OS accounts to the central repository and makes analysis
+ * results based on previous occurrences. When the ingest job is completed,
+ * ensures the data source in the central repository has hash values that match
+ * those in the case database.
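+ *
+ * A rough usage sketch. In practice the module is constructed by
+ * CentralRepoIngestModuleFactory and driven by the ingest framework; the
+ * no-argument IngestSettings constructor is assumed here to supply defaults:
+ * <pre>{@code
+ * IngestSettings settings = new IngestSettings(); // assumed default settings
+ * CentralRepoDataArtifactIngestModule module = new CentralRepoDataArtifactIngestModule(settings);
+ * module.startUp(context);  // throws if the central repository is not enabled
+ * module.process(artifact); // called once per data artifact in the job
+ * module.shutDown();        // OS account analysis, bulk commit, hash sync
+ * }</pre>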
 */
 public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestModule {
 
-    private final AtomicLong artifactCounter = new AtomicLong();
+    private static final Logger LOGGER = Logger.getLogger(CentralRepoDataArtifactIngestModule.class.getName());
+    private final boolean flagNotableItems;
+    private final boolean flagPrevSeenDevices;
+    private final boolean flagUniqueArtifacts;
+    private final boolean saveCorrAttrInstances;
+    private final Set<String> corrAttrValuesAlreadyProcessed;
+    private CentralRepository centralRepo;
+    private IngestJobContext context;
 
+    /**
+     * Constructs a data artifact ingest module that adds correlation attributes
+     * for data artifacts and OS accounts to the central repository and makes
+     * analysis results based on previous occurrences. When the ingest job is
+     * completed, ensures the data source in the central repository has hash
+     * values that match those in the case database.
+     *
+     * @param settings The ingest job settings for this module.
+     */
+    CentralRepoDataArtifactIngestModule(IngestSettings settings) {
+        flagNotableItems = settings.isFlagTaggedNotableItems();
+        flagPrevSeenDevices = settings.isFlagPreviousDevices();
+        flagUniqueArtifacts = settings.isFlagUniqueArtifacts();
+        saveCorrAttrInstances = settings.shouldCreateCorrelationProperties();
+        corrAttrValuesAlreadyProcessed = new LinkedHashSet<>();
+    }
+
+    @NbBundle.Messages({
+        "CentralRepoIngestModule_crNotEnabledErrMsg=Central repository required, but not enabled",
+        "CentralRepoIngestModule_crInaccessibleErrMsg=Error accessing central repository",
+        "CentralRepoIngestModule_noCurrentCaseErrMsg=Error getting current case",
+        "CentralRepoIngestModule_crDatabaseTypeMismatch=Multi-user cases require a PostgreSQL central repository"
+    })
+    @Override
+    public void startUp(IngestJobContext context) throws IngestModuleException {
+        this.context = context;
+
+        if (!CentralRepository.isEnabled()) {
+            throw new IngestModuleException(Bundle.CentralRepoIngestModule_crNotEnabledErrMsg()); // May be displayed to user.
+        }
+
+        try {
+            centralRepo = CentralRepository.getInstance();
+        } catch (CentralRepoException ex) {
+            throw new IngestModuleException(Bundle.CentralRepoIngestModule_crInaccessibleErrMsg(), ex);
+        }
+
+        /*
+         * Don't allow a SQLite central repository to be used for a multi-user
+         * case.
+         */
+        try {
+            Case currentCase = Case.getCurrentCaseThrows();
+            if ((currentCase.getCaseType() == Case.CaseType.MULTI_USER_CASE) && (CentralRepoDbManager.getSavedDbChoice().getDbPlatform() == CentralRepoPlatforms.SQLITE)) {
+                throw new IngestModuleException(Bundle.CentralRepoIngestModule_crDatabaseTypeMismatch());
+            }
+        } catch (NoCurrentCaseException ex) {
+            throw new IngestModuleException(Bundle.CentralRepoIngestModule_noCurrentCaseErrMsg(), ex);
+        }
+    }
+
+    /**
+     * Translates the attributes of a data artifact into central repository
+     * correlation attributes and uses them to create analysis results and new
+     * central repository correlation attribute instances, depending on ingest
+     * job settings.
+     *
+     * @param artifact The data artifact.
+     *
+     * @return An ingest module process result.
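+     *
+     * A rough sketch of the intended call pattern (the ingest framework, not
+     * user code, drives this; names here are illustrative only):
+     * <pre>{@code
+     * // called once per data artifact in the ingest job
+     * ProcessResult result = module.process(artifact);
+     * // errors are logged rather than surfaced, so OK is always returned
+     * assert result == IngestModule.ProcessResult.OK;
+     * }</pre>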
+     */
     @Override
     public ProcessResult process(DataArtifact artifact) {
-        artifactCounter.incrementAndGet();
+        if (flagNotableItems || flagPrevSeenDevices || flagUniqueArtifacts || saveCorrAttrInstances) {
+            for (CorrelationAttributeInstance corrAttr : CorrelationAttributeUtil.makeCorrAttrsToSave(artifact)) {
+                if (corrAttrValuesAlreadyProcessed.add(corrAttr.toString())) {
+                    makeAnalysisResults(artifact, corrAttr);
+                    if (saveCorrAttrInstances) {
+                        try {
+                            centralRepo.addAttributeInstanceBulk(corrAttr);
+                        } catch (CentralRepoException ex) {
+                            LOGGER.log(Level.SEVERE, String.format("Error adding correlation attribute '%s' to central repository for '%s' (job ID=%d)", corrAttr, artifact, context.getJobId()), ex); //NON-NLS
+                        }
+                    }
+                }
+            }
+        }
         return ProcessResult.OK;
     }
 
+    /**
+     * Makes analysis results for a data artifact based on previous occurrences,
+     * if any, of a correlation attribute.
+     *
+     * @param artifact The data artifact.
+     * @param corrAttr A correlation attribute for the data artifact.
+     */
+    private void makeAnalysisResults(DataArtifact artifact, CorrelationAttributeInstance corrAttr) {
+        List<CorrelationAttributeInstance> previousOccurrences = null;
+        if (flagNotableItems) {
+            previousOccurrences = getOccurrencesInOtherCases(corrAttr, context.getJobId());
+            if (!previousOccurrences.isEmpty()) {
+                Set<String> previousCases = new HashSet<>();
+                for (CorrelationAttributeInstance occurrence : previousOccurrences) {
+                    if (occurrence.getKnownStatus() == TskData.FileKnown.BAD) {
+                        previousCases.add(occurrence.getCorrelationCase().getDisplayName());
+                    }
+                }
+                if (!previousCases.isEmpty()) {
+                    makePrevNotableAnalysisResult(artifact, previousCases, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue(), context.getDataSource().getId(), context.getJobId());
+                }
+            }
+        }
+
+        if (flagPrevSeenDevices
+                && (corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.USBID_TYPE_ID
+                || corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.ICCID_TYPE_ID
+                || corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.IMEI_TYPE_ID
+                || corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.IMSI_TYPE_ID
+                || corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.MAC_TYPE_ID
+                || corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.EMAIL_TYPE_ID
+                || corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.PHONE_TYPE_ID)) {
+            if (previousOccurrences == null) {
+                previousOccurrences = getOccurrencesInOtherCases(corrAttr, context.getJobId());
+            }
+            if (!previousOccurrences.isEmpty()) {
+                Set<String> previousCases = getPreviousCases(previousOccurrences);
+                if (!previousCases.isEmpty()) {
+                    makePrevSeenAnalysisResult(artifact, previousCases, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue(), context.getDataSource().getId(), context.getJobId());
+                }
+            }
+        }
+
+        if (flagUniqueArtifacts
+                && (corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.INSTALLED_PROGS_TYPE_ID
+                || corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.DOMAIN_TYPE_ID)) {
+            if (previousOccurrences == null) {
+                previousOccurrences = getOccurrencesInOtherCases(corrAttr, context.getJobId());
+            }
+            if (previousOccurrences.isEmpty()) {
+                makePrevUnseenAnalysisResult(artifact, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue(), context.getDataSource().getId(), context.getJobId());
+            }
+        }
+    }
+
+    /**
+     * Gets a unique set of previous cases, represented by their names, from a
+     * list of previous occurrences of correlation attributes.
+     *
+     * @param previousOccurrences The correlation attribute instances.
+     *
+     * @return The names of the previous cases.
+     */
+    private Set<String> getPreviousCases(List<CorrelationAttributeInstance> previousOccurrences) {
+        Set<String> previousCases = new HashSet<>();
+        for (CorrelationAttributeInstance occurrence : previousOccurrences) {
+            previousCases.add(occurrence.getCorrelationCase().getDisplayName());
+        }
+        return previousCases;
+    }
+
     @Override
     public void shutDown() {
-        IngestServices.getInstance().postMessage(IngestMessage.createMessage(
-                IngestMessage.MessageType.INFO,
-                CentralRepoIngestModuleFactory.getModuleName(),
-                String.format("%d data artifacts processed", artifactCounter.get()))); //NON-NLS
+        analyzeOsAccounts();
+        if (saveCorrAttrInstances) {
+            try {
+                centralRepo.commitAttributeInstancesBulk();
+            } catch (CentralRepoException ex) {
+                LOGGER.log(Level.SEVERE, String.format("Error doing final bulk commit of correlation attributes (job ID=%d)", context.getJobId()), ex); // NON-NLS
+            }
+        }
+        syncDataSourceHashes();
+    }
+
+    /**
+     * Queries the case database for any OS accounts associated with the data
+     * source for the ingest job. The attributes of any OS account returned by
+     * the query are translated into central repository correlation attributes
+     * and used to create analysis results and new central repository
+     * correlation attribute instances, depending on ingest job settings.
+     */
+    @NbBundle.Messages({
+        "CentralRepoIngestModule_prevSeenOsAcctSetName=Users seen in previous cases",
+        "CentralRepoIngestModule_prevSeenOsAcctConfig=Previously Seen Users (Central Repository)"
+    })
+    private void analyzeOsAccounts() {
+        if (saveCorrAttrInstances || flagPrevSeenDevices) {
+            try {
+                OsAccountManager osAccountMgr = Case.getCurrentCaseThrows().getSleuthkitCase().getOsAccountManager();
+                List<OsAccount> osAccounts = osAccountMgr.getOsAccountsByDataSourceObjId(context.getDataSource().getId());
+                for (OsAccount osAccount : osAccounts) {
+                    for (CorrelationAttributeInstance corrAttr : CorrelationAttributeUtil.makeCorrAttrsToSave(osAccount, context.getDataSource())) {
+                        if (flagPrevSeenDevices) {
+                            makeAnalysisResults(osAccount, corrAttr);
+                        }
+                        if (saveCorrAttrInstances) {
+                            try {
+                                centralRepo.addAttributeInstanceBulk(corrAttr);
+                            } catch (CentralRepoException ex) {
+                                LOGGER.log(Level.SEVERE, String.format("Error adding correlation attribute '%s' to central repository for '%s' (job ID=%d)", corrAttr, osAccount, context.getJobId()), ex); //NON-NLS
+                            }
+                        }
+                    }
+                }
+            } catch (NoCurrentCaseException | TskCoreException ex) {
+                LOGGER.log(Level.SEVERE, String.format("Error getting OS accounts for data source '%s' (job ID=%d)", context.getDataSource(), context.getJobId()), ex);
+            }
+        }
+    }
+
+    /**
+     * Makes analysis results for an OS account based on previous occurrences,
+     * if any, of a correlation attribute.
+     *
+     * @param osAccount The OS account.
+     * @param corrAttr  A correlation attribute for the OS account.
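+     *
+     * A minimal sketch of the flagging rule applied below (illustrative only):
+     * <pre>{@code
+     * List<CorrelationAttributeInstance> occurrences =
+     *         getOccurrencesInOtherCases(corrAttr, context.getJobId());
+     * if (!occurrences.isEmpty()) {
+     *     // post a TSK_PREVIOUSLY_SEEN analysis result for the OS account
+     * }
+     * }</pre>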
+ */ + private void makeAnalysisResults(OsAccount osAccount, CorrelationAttributeInstance corrAttr) { + if (flagPrevSeenDevices) { + List previousOccurrences = getOccurrencesInOtherCases(corrAttr, context.getJobId()); + if (!previousOccurrences.isEmpty()) { + Set previousCases = getPreviousCases(previousOccurrences); + if (!previousCases.isEmpty()) { + makePrevSeenAnalysisResult(osAccount, previousCases, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue(), context.getDataSource().getId(), context.getJobId()); + } + } + } + } + + /** + * Ensures the data source in the central repository has hash values that + * match those in the case database. + */ + private void syncDataSourceHashes() { + if (!(context.getDataSource() instanceof Image)) { + return; + } + + try { + Case currentCase = Case.getCurrentCaseThrows(); + CorrelationCase correlationCase = centralRepo.getCase(currentCase); + if (correlationCase == null) { + correlationCase = centralRepo.newCase(currentCase); + } + + CorrelationDataSource correlationDataSource = centralRepo.getDataSource(correlationCase, context.getDataSource().getId()); + if (correlationDataSource == null) { + correlationDataSource = CorrelationDataSource.fromTSKDataSource(correlationCase, context.getDataSource()); + } + + Image image = (Image) context.getDataSource(); + String imageMd5Hash = image.getMd5(); + if (imageMd5Hash == null) { + imageMd5Hash = ""; + } + String crMd5Hash = correlationDataSource.getMd5(); + if (StringUtils.equals(imageMd5Hash, crMd5Hash) == false) { + correlationDataSource.setMd5(imageMd5Hash); + } + + String imageSha1Hash = image.getSha1(); + if (imageSha1Hash == null) { + imageSha1Hash = ""; + } + String crSha1Hash = correlationDataSource.getSha1(); + if (StringUtils.equals(imageSha1Hash, crSha1Hash) == false) { + correlationDataSource.setSha1(imageSha1Hash); + } + + String imageSha256Hash = image.getSha256(); + if (imageSha256Hash == null) { + imageSha256Hash = ""; + } + String crSha256Hash = correlationDataSource.getSha256(); + if (StringUtils.equals(imageSha256Hash, crSha256Hash) == false) { + correlationDataSource.setSha256(imageSha256Hash); + } + + } catch (CentralRepoException ex) { + LOGGER.log(Level.SEVERE, String.format("Error fetching data from the central repository for data source '%s' (job ID=%d)", context.getDataSource().getName(), context.getJobId()), ex); + } catch (NoCurrentCaseException | TskCoreException ex) { + LOGGER.log(Level.SEVERE, String.format("Error fetching data from the case database for data source '%s' (job ID=%d)", context.getDataSource().getName(), context.getJobId()), ex); + } } } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java index df6bd58801..32aaedfac3 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java @@ -1,7 +1,7 @@ /* * Central Repository * - * Copyright 2011-2021 Basis Technology Corp. + * Copyright 2018-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -18,11 +18,10 @@ */ package org.sleuthkit.autopsy.centralrepository.ingestmodule; -import java.util.Arrays; -import java.util.Collection; +import java.util.HashSet; import java.util.List; +import java.util.Set; import java.util.logging.Level; -import java.util.stream.Collectors; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; @@ -32,95 +31,52 @@ import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeUtil; import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException; -import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoPlatforms; -import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoDbManager; -import org.sleuthkit.autopsy.centralrepository.eventlisteners.IngestEventsListener; -import org.sleuthkit.autopsy.core.RuntimeProperties; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; import org.sleuthkit.autopsy.healthmonitor.HealthMonitor; import org.sleuthkit.autopsy.healthmonitor.TimingMetric; import org.sleuthkit.autopsy.ingest.FileIngestModule; import org.sleuthkit.autopsy.ingest.IngestJobContext; -import org.sleuthkit.autopsy.ingest.IngestMessage; import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter; -import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.Blackboard; -import org.sleuthkit.datamodel.BlackboardArtifact; -import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_PREVIOUSLY_NOTABLE; -import org.sleuthkit.datamodel.BlackboardAttribute; -import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME; -import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_TYPE; -import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_VALUE; -import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_OTHER_CASES; import org.sleuthkit.datamodel.HashUtility; -import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository; -import org.sleuthkit.datamodel.Score; +import static org.sleuthkit.autopsy.centralrepository.ingestmodule.CentralRepoIngestModuleUtils.makePrevNotableAnalysisResult; /** - * Ingest module for inserting entries into the Central Repository database on - * ingest of a data source + * A file ingest module that adds correlation attributes for files to the + * central repository, and makes previously notable analysis results for files + * marked as notable in other cases. 
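+ *
+ * A rough sketch of the per-file decision, assuming the file hashing and
+ * lookup module runs earlier in the pipeline (this module does not hash
+ * files itself):
+ * <pre>{@code
+ * String md5 = abstractFile.getMd5Hash();
+ * if (md5 != null && !HashUtility.isNoDataMd5(md5)) {
+ *     // look up notable hits for the hash in the central repository and,
+ *     // if configured, queue the hash as a new correlation attribute
+ * }
+ * }</pre>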
*/ -@Messages({"CentralRepoIngestModule.prevTaggedSet.text=Previously Tagged As Notable (Central Repository)", - "CentralRepoIngestModule.prevCaseComment.text=Previous Case: "}) final class CentralRepoIngestModule implements FileIngestModule { - private static final String MODULE_NAME = CentralRepoIngestModuleFactory.getModuleName(); - static final boolean DEFAULT_FLAG_TAGGED_NOTABLE_ITEMS = false; - static final boolean DEFAULT_FLAG_PREVIOUS_DEVICES = false; - static final boolean DEFAULT_FLAG_UNIQUE_DEVICES = false; - static final boolean DEFAULT_CREATE_CR_PROPERTIES = true; - - private final static Logger logger = Logger.getLogger(CentralRepoIngestModule.class.getName()); - private final IngestServices services = IngestServices.getInstance(); + private static final Logger logger = Logger.getLogger(CentralRepoIngestModule.class.getName()); private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter(); - private static final IngestModuleReferenceCounter warningMsgRefCounter = new IngestModuleReferenceCounter(); - private long jobId; - private CorrelationCase eamCase; - private CorrelationDataSource eamDataSource; + private final boolean flagNotableItems; + private final boolean saveCorrAttrInstances; private CorrelationAttributeInstance.Type filesType; - private final boolean flagTaggedNotableItems; - private final boolean flagPreviouslySeenDevices; - private Blackboard blackboard; - private final boolean createCorrelationProperties; - private final boolean flagUniqueArtifacts; private IngestJobContext context; - + private CentralRepository centralRepo; + /** - * Instantiate the Central Repository ingest module. + * Constructs a file ingest module that adds correlation attributes for + * files to the central repository, and makes previously notable analysis + * results for files marked as notable in other cases. * - * @param settings The ingest settings for the module instance. + * @param settings The ingest job settings. */ CentralRepoIngestModule(IngestSettings settings) { - flagTaggedNotableItems = settings.isFlagTaggedNotableItems(); - flagPreviouslySeenDevices = settings.isFlagPreviousDevices(); - createCorrelationProperties = settings.shouldCreateCorrelationProperties(); - flagUniqueArtifacts = settings.isFlagUniqueArtifacts(); - } - + flagNotableItems = settings.isFlagTaggedNotableItems(); + saveCorrAttrInstances = settings.shouldCreateCorrelationProperties(); + } + @Override public ProcessResult process(AbstractFile abstractFile) { - if (CentralRepository.isEnabled() == false) { - /* - * Not signaling an error for now. This is a workaround for the way - * all newly didscovered ingest modules are automatically anabled. - * - * TODO (JIRA-2731): Add isEnabled API for ingest modules. 
- */ + if (!flagNotableItems && !saveCorrAttrInstances) { return ProcessResult.OK; } - try { - blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard(); - } catch (NoCurrentCaseException ex) { - logger.log(Level.SEVERE, "Exception while getting open case.", ex); - return ProcessResult.ERROR; - } - - if (!CorrelationAttributeUtil.isSupportedAbstractFileType(abstractFile)) { + if (!filesType.isEnabled()) { return ProcessResult.OK; } @@ -128,290 +84,119 @@ final class CentralRepoIngestModule implements FileIngestModule { return ProcessResult.OK; } - CentralRepository dbManager; - try { - dbManager = CentralRepository.getInstance(); - } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error connecting to Central Repository database.", ex); - return ProcessResult.ERROR; - } - - // only continue if we are correlating filesType - if (!filesType.isEnabled()) { + if (!CorrelationAttributeUtil.isSupportedAbstractFileType(abstractFile)) { return ProcessResult.OK; } - - // get the hash because we're going to correlate it + + /* + * The correlation attribute value for a file is its MD5 hash. This + * module cannot do anything with a file if the hash calculation has not + * been done, but the decision has been made to not do a hash + * calculation here if the file hashing and lookup module is not in this + * pipeline ahead of this module (affirmed per BC, 11/8/21). + */ String md5 = abstractFile.getMd5Hash(); if ((md5 == null) || (HashUtility.isNoDataMd5(md5))) { return ProcessResult.OK; } - /* - * Search the central repo to see if this file was previously marked as - * being bad. Create artifact if it was. - */ - if (abstractFile.getKnown() != TskData.FileKnown.KNOWN && flagTaggedNotableItems) { + if (flagNotableItems) { try { TimingMetric timingMetric = HealthMonitor.getTimingMetric("Central Repository: Notable artifact query"); - List caseDisplayNamesList = dbManager.getListCasesHavingArtifactInstancesKnownBad(filesType, md5); + Set otherCases = new HashSet<>(); + otherCases.addAll(centralRepo.getListCasesHavingArtifactInstancesKnownBad(filesType, md5)); HealthMonitor.submitTimingMetric(timingMetric); - if (!caseDisplayNamesList.isEmpty()) { - postCorrelatedBadFileToBlackboard(abstractFile, caseDisplayNamesList, filesType, md5); + if (!otherCases.isEmpty()) { + makePrevNotableAnalysisResult(abstractFile, otherCases, filesType, md5, context.getDataSource().getId(), context.getJobId()); } } catch (CentralRepoException ex) { logger.log(Level.SEVERE, "Error searching database for artifact.", ex); // NON-NLS - return ProcessResult.ERROR; } catch (CorrelationAttributeNormalizationException ex) { logger.log(Level.INFO, "Error searching database for artifact.", ex); // NON-NLS - return ProcessResult.ERROR; } } - // insert this file into the central repository - if (createCorrelationProperties) { - try { - CorrelationAttributeInstance cefi = new CorrelationAttributeInstance( - filesType, - md5, - eamCase, - eamDataSource, - abstractFile.getParentPath() + abstractFile.getName(), - null, - TskData.FileKnown.UNKNOWN // NOTE: Known status in the CR is based on tagging, not hashes like the Case Database. 
- , - abstractFile.getId()); - dbManager.addAttributeInstanceBulk(cefi); - } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error adding artifact to bulk artifacts.", ex); // NON-NLS - return ProcessResult.ERROR; - } catch (CorrelationAttributeNormalizationException ex) { - logger.log(Level.INFO, "Error adding artifact to bulk artifacts.", ex); // NON-NLS - return ProcessResult.ERROR; + if (saveCorrAttrInstances) { + List corrAttrs = CorrelationAttributeUtil.makeCorrAttrsToSave(abstractFile); + for (CorrelationAttributeInstance corrAttr : corrAttrs) { + try { + centralRepo.addAttributeInstanceBulk(corrAttr); + } catch (CentralRepoException ex) { + logger.log(Level.SEVERE, "Error adding artifact to bulk artifacts.", ex); // NON-NLS + } } } + return ProcessResult.OK; } @Override public void shutDown() { - IngestEventsListener.decrementCorrelationEngineModuleCount(); - - if ((CentralRepository.isEnabled() == false) || (eamCase == null) || (eamDataSource == null)) { - return; + if (refCounter.decrementAndGet(context.getJobId()) == 0) { + try { + centralRepo.commitAttributeInstancesBulk(); + } catch (CentralRepoException ex) { + logger.log(Level.SEVERE, String.format("Error committing bulk insert of correlation attributes (job ID=%d)", context.getJobId()), ex); // NON-NLS + } } - CentralRepository dbManager; - try { - dbManager = CentralRepository.getInstance(); - } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error connecting to Central Repository database.", ex); - return; - } - try { - dbManager.commitAttributeInstancesBulk(); - } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error doing bulk insert of artifacts.", ex); // NON-NLS - } - try { - Long count = dbManager.getCountArtifactInstancesByCaseDataSource(eamDataSource); - logger.log(Level.INFO, "{0} artifacts in db for case: {1} ds:{2}", new Object[]{count, eamCase.getDisplayName(), eamDataSource.getName()}); // NON-NLS - } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error counting artifacts.", ex); // NON-NLS - } - - // TODO: once we implement shared cache, if refCounter is 1, then submit data in bulk. - refCounter.decrementAndGet(jobId); - } - - // see ArtifactManagerTimeTester for details + } + @Messages({ - "CentralRepoIngestModule.notfyBubble.title=Central Repository Not Initialized", - "CentralRepoIngestModule.errorMessage.isNotEnabled=Central repository settings are not initialized, cannot run Central Repository ingest module." + "CentralRepoIngestModule_missingFileCorrAttrTypeErrMsg=Correlation attribute type for files not found in the central repository", + "CentralRepoIngestModule_cannotGetCrCaseErrMsg=Case not present in the central repository", + "CentralRepoIngestModule_cannotGetCrDataSourceErrMsg=Data source not present in the central repository" }) @Override public void startUp(IngestJobContext context) throws IngestModuleException { this.context = context; - - IngestEventsListener.incrementCorrelationEngineModuleCount(); + + if (!CentralRepository.isEnabled()) { + throw new IngestModuleException(Bundle.CentralRepoIngestModule_crNotEnabledErrMsg()); + } + + try { + centralRepo = CentralRepository.getInstance(); + } catch (CentralRepoException ex) { + throw new IngestModuleException(Bundle.CentralRepoIngestModule_crInaccessibleErrMsg(), ex); + } /* - * Tell the IngestEventsListener to flag notable items based on the - * current module's configuration. This is a work around for the lack of - * an artifacts pipeline. 
Note that this can be changed by another - * module instance. All modules are affected by the value. While not - * ideal, this will be good enough until a better solution can be - * posited. - * - * Note: Flagging cannot be disabled if any other instances of the - * Central Repository module are running. This restriction is to prevent - * missing results in the case where the first module is flagging - * notable items, and the proceeding module (with flagging disabled) - * causes the first to stop flagging. + * Make sure the correlation attribute type definition is in the central + * repository. Currently (11/8/21) it is cached, but there is no harm in + * saving it here for use in process(). */ - if (IngestEventsListener.getCeModuleInstanceCount() == 1 || !IngestEventsListener.isFlagNotableItems()) { - IngestEventsListener.setFlagNotableItems(flagTaggedNotableItems); - } - if (IngestEventsListener.getCeModuleInstanceCount() == 1 || !IngestEventsListener.isFlagSeenDevices()) { - IngestEventsListener.setFlagSeenDevices(flagPreviouslySeenDevices); - } - if (IngestEventsListener.getCeModuleInstanceCount() == 1 || !IngestEventsListener.shouldCreateCrProperties()) { - IngestEventsListener.setCreateCrProperties(createCorrelationProperties); - } - if (IngestEventsListener.getCeModuleInstanceCount() == 1 || !IngestEventsListener.isFlagUniqueArtifacts()) { - IngestEventsListener.setFlagUniqueArtifacts(flagUniqueArtifacts); - } - - if (CentralRepository.isEnabled() == false) { - /* - * Not throwing the customary exception for now. This is a - * workaround for the way all newly didscovered ingest modules are - * automatically anabled. - * - * TODO (JIRA-2731): Add isEnabled API for ingest modules. - */ - if (RuntimeProperties.runningWithGUI()) { - if (1L == warningMsgRefCounter.incrementAndGet(jobId)) { - MessageNotifyUtil.Notify.warn(Bundle.CentralRepoIngestModule_notfyBubble_title(), Bundle.CentralRepoIngestModule_errorMessage_isNotEnabled()); - } - } - return; - } - Case autopsyCase; try { - autopsyCase = Case.getCurrentCaseThrows(); - } catch (NoCurrentCaseException ex) { - logger.log(Level.SEVERE, "Exception while getting open case.", ex); - throw new IngestModuleException("Exception while getting open case.", ex); - } - - // Don't allow sqlite central repo databases to be used for multi user cases - if ((autopsyCase.getCaseType() == Case.CaseType.MULTI_USER_CASE) - && (CentralRepoDbManager.getSavedDbChoice().getDbPlatform() == CentralRepoPlatforms.SQLITE)) { - logger.log(Level.SEVERE, "Cannot run Central Repository ingest module on a multi-user case with a SQLite central repository."); - throw new IngestModuleException("Cannot run on a multi-user case with a SQLite central repository."); // NON-NLS - } - jobId = context.getJobId(); - - CentralRepository centralRepoDb; - try { - centralRepoDb = CentralRepository.getInstance(); + filesType = centralRepo.getCorrelationTypeById(CorrelationAttributeInstance.FILES_TYPE_ID); } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error connecting to central repository database.", ex); // NON-NLS - throw new IngestModuleException("Error connecting to central repository database.", ex); // NON-NLS + throw new IngestModuleException(Bundle.CentralRepoIngestModule_missingFileCorrAttrTypeErrMsg(), ex); } - try { - filesType = centralRepoDb.getCorrelationTypeById(CorrelationAttributeInstance.FILES_TYPE_ID); - } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error getting correlation type FILES in ingest module start up.", ex); // 
NON-NLS - throw new IngestModuleException("Error getting correlation type FILES in ingest module start up.", ex); // NON-NLS - } - - try { - eamCase = centralRepoDb.getCase(autopsyCase); - } catch (CentralRepoException ex) { - throw new IngestModuleException("Unable to get case from central repository database ", ex); - } - - try { - eamDataSource = CorrelationDataSource.fromTSKDataSource(eamCase, context.getDataSource()); - } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error getting data source info.", ex); // NON-NLS - throw new IngestModuleException("Error getting data source info.", ex); // NON-NLS - } - // TODO: once we implement a shared cache, load/init it here w/ syncronized and define reference counter - // if we are the first thread / module for this job, then make sure the case - // and image exist in the DB before we associate artifacts with it. - if (refCounter.incrementAndGet(jobId) - == 1) { - // ensure we have this data source in the EAM DB + /* + * The first module instance started for this job makes sure the current + * case and data source are in the central repository. Currently + * (11/8/21), these are cached upon creation / first retreival. + */ + if (refCounter.incrementAndGet(context.getJobId()) == 1) { + Case currentCase; try { - if (null == centralRepoDb.getDataSource(eamCase, eamDataSource.getDataSourceObjectID())) { - centralRepoDb.newDataSource(eamDataSource); - } + currentCase = Case.getCurrentCaseThrows(); + } catch (NoCurrentCaseException ex) { + throw new IngestModuleException(Bundle.CentralRepoIngestModule_noCurrentCaseErrMsg(), ex); + } + + CorrelationCase centralRepoCase; + try { + centralRepoCase = centralRepo.getCase(currentCase); } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error adding data source to Central Repository.", ex); // NON-NLS - throw new IngestModuleException("Error adding data source to Central Repository.", ex); // NON-NLS + throw new IngestModuleException(Bundle.CentralRepoIngestModule_cannotGetCrCaseErrMsg(), ex); } - } - } - - /** - * Post a new "previously seen" artifact for the file marked bad. - * - * @param abstractFile The file from which to create an artifact. - * @param caseDisplayNames Case names to be added to a TSK_COMMON attribute. - */ - private void postCorrelatedBadFileToBlackboard(AbstractFile abstractFile, List caseDisplayNames, CorrelationAttributeInstance.Type aType, String value) { - String prevCases = caseDisplayNames.stream().distinct().collect(Collectors.joining(",")); - String justification = "Previously marked as notable in cases " + prevCases; - Collection attributes = Arrays.asList( - new BlackboardAttribute( - TSK_SET_NAME, MODULE_NAME, - Bundle.CentralRepoIngestModule_prevTaggedSet_text()), - new BlackboardAttribute( - TSK_CORRELATION_TYPE, MODULE_NAME, - aType.getDisplayName()), - new BlackboardAttribute( - TSK_CORRELATION_VALUE, MODULE_NAME, - value), - new BlackboardAttribute( - TSK_OTHER_CASES, MODULE_NAME, - prevCases)); - try { - // Create artifact if it doesn't already exist. 
-        if (!blackboard.artifactExists(abstractFile, TSK_PREVIOUSLY_NOTABLE, attributes)) {
-            BlackboardArtifact tifArtifact = abstractFile.newAnalysisResult(
-                    BlackboardArtifact.Type.TSK_PREVIOUSLY_NOTABLE, Score.SCORE_NOTABLE,
-                    null, Bundle.CentralRepoIngestModule_prevTaggedSet_text(), justification, attributes)
-                    .getAnalysisResult();
-            try {
-                // index the artifact for keyword search
-                blackboard.postArtifact(tifArtifact, MODULE_NAME, context.getJobId());
-            } catch (Blackboard.BlackboardException ex) {
-                logger.log(Level.SEVERE, "Unable to index blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS
-            }
-            // send inbox message
-            sendBadFileInboxMessage(tifArtifact, abstractFile.getName(), abstractFile.getMd5Hash(), caseDisplayNames);
+            try {
+                CorrelationDataSource.fromTSKDataSource(centralRepoCase, context.getDataSource());
+            } catch (CentralRepoException ex) {
+                throw new IngestModuleException(Bundle.CentralRepoIngestModule_cannotGetCrDataSourceErrMsg(), ex);
+            }
         }
     }
-        } catch (TskCoreException ex) {
-            logger.log(Level.SEVERE, "Failed to create BlackboardArtifact.", ex); // NON-NLS
-        } catch (IllegalStateException ex) {
-            logger.log(Level.SEVERE, "Failed to create BlackboardAttribute.", ex); // NON-NLS
-        }
-    }
 
-    @Messages({
-        "CentralRepoIngestModule_notable_message_header=<html>A file in this data source was previously seen and tagged as Notable.<br>",
-        "CentralRepoIngestModel_name_header=<b>Name:</b><br>",
-        "CentralRepoIngestModel_previous_case_header=<br><b>Previous Cases:</b><br>",
-        "# {0} - Name of file that is Notable",
-        "CentralRepoIngestModule_postToBB_knownBadMsg=Notable: {0}"
-    })
-
-    /**
-     * Post a message to the ingest inbox alerting the user that a bad file was
-     * found.
-     *
-     * @param artifact         badFile Blackboard Artifact
-     * @param name             badFile's name
-     * @param md5Hash          badFile's md5 hash
-     * @param caseDisplayNames List of cases that the artifact appears in.
-     */
-    private void sendBadFileInboxMessage(BlackboardArtifact artifact, String name, String md5Hash, List<String> caseDisplayNames) {
-        StringBuilder detailsSb = new StringBuilder(1024);
-
-        detailsSb.append(Bundle.CentralRepoIngestModule_notable_message_header()).append(Bundle.CentralRepoIngestModel_name_header());
-        detailsSb.append(name).append(Bundle.CentralRepoIngestModel_previous_case_header());
-        for (String str : caseDisplayNames) {
-            detailsSb.append(str).append("<br>");
-        }
-        detailsSb.append("</html>");
-        services.postMessage(IngestMessage.createDataMessage(CentralRepoIngestModuleFactory.getModuleName(),
-                Bundle.CentralRepoIngestModule_postToBB_knownBadMsg(name),
-                detailsSb.toString(),
-                name + md5Hash,
-                artifact));
-    }
 }
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModuleFactory.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModuleFactory.java
index ddece9731c..8a5e122e6c 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModuleFactory.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModuleFactory.java
@@ -128,7 +128,7 @@ public class CentralRepoIngestModuleFactory extends IngestModuleFactoryAdapter {
 
     @Override
     public DataArtifactIngestModule createDataArtifactIngestModule(IngestModuleIngestJobSettings settings) {
-        return new CentralRepoDataArtifactIngestModule();
+        return new CentralRepoDataArtifactIngestModule((IngestSettings) settings);
     }
 
 }
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModuleUtils.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModuleUtils.java
new file mode 100755
index 0000000000..e1f2582478
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModuleUtils.java
@@ -0,0 +1,336 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2021-2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package org.sleuthkit.autopsy.centralrepository.ingestmodule; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.Optional; +import java.util.Set; +import java.util.logging.Level; +import java.util.stream.Collectors; +import org.openide.util.NbBundle; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; +import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException; +import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository; +import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance; +import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.ingest.IngestMessage; +import org.sleuthkit.autopsy.ingest.IngestServices; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.AnalysisResult; +import org.sleuthkit.datamodel.Blackboard; +import org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.BlackboardAttribute; +import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_TYPE; +import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_VALUE; +import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_OTHER_CASES; +import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME; +import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.DataArtifact; +import org.sleuthkit.datamodel.Score; +import org.sleuthkit.datamodel.TskCoreException; + +/** + * Utility methods shared by the central repository ingest modules. + */ +class CentralRepoIngestModuleUtils { + + private static final Logger LOGGER = Logger.getLogger(CentralRepoDataArtifactIngestModule.class.getName()); + private static final int MAX_PREV_CASES_FOR_NOTABLE_SCORE = 10; + private static final int MAX_PREV_CASES_FOR_PREV_SEEN = 20; + private final static String MODULE_NAME = CentralRepoIngestModuleFactory.getModuleName(); + + /** + * Gets any previous occurrences of a given correlation attribute in cases + * other than the current case. + * + * @param corrAttr The correlation attribute. + * + * @return The other occurrences of the correlation attribute. 
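+     *
+     * For example (hypothetical caller; occurrences from the current case are
+     * filtered out of the returned list):
+     * <pre>{@code
+     * List<CorrelationAttributeInstance> others =
+     *         getOccurrencesInOtherCases(corrAttr, ingestJobId);
+     * boolean seenElsewhere = !others.isEmpty();
+     * }</pre>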
+ */ + static List<CorrelationAttributeInstance> getOccurrencesInOtherCases(CorrelationAttributeInstance corrAttr, long ingestJobId) { + List<CorrelationAttributeInstance> previousOccurrences = new ArrayList<>(); + try { + CentralRepository centralRepo = CentralRepository.getInstance(); + previousOccurrences = centralRepo.getArtifactInstancesByTypeValue(corrAttr.getCorrelationType(), corrAttr.getCorrelationValue()); + for (Iterator<CorrelationAttributeInstance> iterator = previousOccurrences.iterator(); iterator.hasNext();) { + CorrelationAttributeInstance prevOccurrence = iterator.next(); + if (prevOccurrence.getCorrelationCase().getCaseUUID().equals(corrAttr.getCorrelationCase().getCaseUUID())) { + iterator.remove(); + } + } + } catch (CorrelationAttributeNormalizationException ex) { + LOGGER.log(Level.SEVERE, String.format("Error normalizing correlation attribute value for '%s' (job ID=%d)", corrAttr, ingestJobId), ex); // NON-NLS + } catch (CentralRepoException ex) { + LOGGER.log(Level.SEVERE, String.format("Error getting previous occurrences of correlation attribute '%s' (job ID=%d)", corrAttr, ingestJobId), ex); // NON-NLS + } + return previousOccurrences; + } + + /** + * Makes a previously notable analysis result for a content. + * + * @param content The content. + * @param previousCases The names of the cases in which the artifact was + * deemed notable. + * @param corrAttrType The type of the matched correlation attribute. + * @param corrAttrValue The value of the matched correlation attribute. + * @param dataSourceObjId The data source object ID. + * @param ingestJobId The ingest job ID. + */ + @NbBundle.Messages({ + "CentralRepoIngestModule_notableSetName=Previously Tagged As Notable (Central Repository)", + "# {0} - list of cases", + "CentralRepoIngestModule_notableJustification=Previously marked as notable in cases {0}" + }) + static void makePrevNotableAnalysisResult(Content content, Set<String> previousCases, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue, long dataSourceObjId, long ingestJobId) { + String prevCases = previousCases.stream().collect(Collectors.joining(",")); + String justification = Bundle.CentralRepoIngestModule_notableJustification(prevCases); + Collection<BlackboardAttribute> attributes = Arrays.asList( + new BlackboardAttribute(TSK_SET_NAME, MODULE_NAME, Bundle.CentralRepoIngestModule_notableSetName()), + new BlackboardAttribute(TSK_CORRELATION_TYPE, MODULE_NAME, corrAttrType.getDisplayName()), + new BlackboardAttribute(TSK_CORRELATION_VALUE, MODULE_NAME, corrAttrValue), + new BlackboardAttribute(TSK_OTHER_CASES, MODULE_NAME, prevCases)); + Optional<AnalysisResult> result = makeAndPostAnalysisResult(content, BlackboardArtifact.Type.TSK_PREVIOUSLY_NOTABLE, attributes, "", Score.SCORE_NOTABLE, justification, dataSourceObjId, ingestJobId); + if (result.isPresent()) { + postNotableMessage(content, previousCases, corrAttrValue, result.get()); + } + } + + /** + * Makes a previously seen analysis result for a content, unless the content + * is too common. + * + * @param content The content. + * @param previousCases The names of the cases in which the artifact was + * previously seen. + * @param corrAttrType The type of the matched correlation attribute. + * @param corrAttrValue The value of the matched correlation attribute. + * @param dataSourceObjId The data source object ID. + * @param ingestJobId The ingest job ID.
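+     * + * <p> + * Illustrative call (hypothetical caller, not part of this patch), given occurrences already filtered to other cases: + * <pre>{@code + * Set<String> caseNames = otherOccurrences.stream() + *         .map(occ -> occ.getCorrelationCase().getDisplayName()) + *         .collect(Collectors.toSet()); + * makePrevSeenAnalysisResult(file, caseNames, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue(), dataSourceObjId, jobId); + * }</pre>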
+ */ + @NbBundle.Messages({ + "CentralRepoIngestModule_prevSeenSetName=Previously Seen (Central Repository)", + "# {0} - list of cases", + "CentralRepoIngestModule_prevSeenJustification=Previously seen in cases {0}" + }) + static void makePrevSeenAnalysisResult(Content content, Set previousCases, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue, long dataSourceObjId, long ingestJobId) { + Optional score = calculateScore(previousCases.size()); + if (score.isPresent()) { + String prevCases = previousCases.stream().collect(Collectors.joining(",")); + String justification = Bundle.CentralRepoIngestModule_prevSeenJustification(prevCases); + Collection analysisResultAttributes = Arrays.asList( + new BlackboardAttribute(TSK_SET_NAME, MODULE_NAME, Bundle.CentralRepoIngestModule_prevSeenSetName()), + new BlackboardAttribute(TSK_CORRELATION_TYPE, MODULE_NAME, corrAttrType.getDisplayName()), + new BlackboardAttribute(TSK_CORRELATION_VALUE, MODULE_NAME, corrAttrValue), + new BlackboardAttribute(TSK_OTHER_CASES, MODULE_NAME, prevCases)); + makeAndPostAnalysisResult(content, BlackboardArtifact.Type.TSK_PREVIOUSLY_SEEN, analysisResultAttributes, "", score.get(), justification, dataSourceObjId, ingestJobId); + } + } + + /** + * Makes a previously unseen analysis result for a content. + * + * @param content The content. + * @param corrAttrType The type of the new correlation attribute. + * @param corrAttrValue The value of the new correlation attribute. + * @param dataSourceObjId The data source object ID. + * @param ingestJobId The ingest job ID. + */ + @NbBundle.Messages({ + "CentralRepoIngestModule_prevUnseenJustification=Previously seen in zero cases" + }) + static void makePrevUnseenAnalysisResult(Content content, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue, long dataSourceObjId, long ingestJobId) { + Collection attributesForNewArtifact = Arrays.asList( + new BlackboardAttribute(TSK_CORRELATION_TYPE, MODULE_NAME, corrAttrType.getDisplayName()), + new BlackboardAttribute(TSK_CORRELATION_VALUE, MODULE_NAME, corrAttrValue)); + makeAndPostAnalysisResult(content, BlackboardArtifact.Type.TSK_PREVIOUSLY_UNSEEN, attributesForNewArtifact, "", Score.SCORE_LIKELY_NOTABLE, Bundle.CentralRepoIngestModule_prevUnseenJustification(), dataSourceObjId, ingestJobId); + } + + /** + * Calculates a score based in a number of previous cases. + * + * @param numPreviousCases The number of previous cases. + * + * @return An Optional of a score, will be empty if there is no score + * because the number of previous cases is too high, indicating a + * common and therefore uninteresting item. + */ + static Optional calculateScore(int numPreviousCases) { + Score score = null; + if (numPreviousCases <= MAX_PREV_CASES_FOR_NOTABLE_SCORE) { + score = Score.SCORE_LIKELY_NOTABLE; + } else if (numPreviousCases > MAX_PREV_CASES_FOR_NOTABLE_SCORE && numPreviousCases <= MAX_PREV_CASES_FOR_PREV_SEEN) { + score = Score.SCORE_NONE; + } + return Optional.ofNullable(score); + } + + /** + * Makes a new analysis result of a given type for a content and posts it to + * the blackboard. + * + * @param content The content. + * @param analysisResultType The type of analysis result to make. + * @param analysisResultAttrs The attributes of the new analysis result. + * @param configuration The configuration for the new analysis result. + * @param score The score for the new analysis result. + * @param justification The justification for the new analysis result. 
+ * @param dataSourceObjId The data source object ID. + * @param ingestJobId The ingest job ID. + * + * @return An Optional containing the new analysis result; empty if the + * result already existed or an error prevented its creation. + */ + private static Optional<AnalysisResult> makeAndPostAnalysisResult(Content content, BlackboardArtifact.Type analysisResultType, Collection<BlackboardAttribute> analysisResultAttrs, String configuration, Score score, String justification, long dataSourceObjId, long ingestJobId) { + AnalysisResult analysisResult = null; + try { + Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard(); + if (!blackboard.artifactExists(content, analysisResultType, analysisResultAttrs)) { + analysisResult = content.newAnalysisResult(analysisResultType, score, null, configuration, justification, analysisResultAttrs, dataSourceObjId).getAnalysisResult(); + try { + blackboard.postArtifact(analysisResult, MODULE_NAME, ingestJobId); + } catch (Blackboard.BlackboardException ex) { + LOGGER.log(Level.SEVERE, String.format("Error posting analysis result '%s' to blackboard for content '%s' (job ID=%d)", analysisResult, content, ingestJobId), ex); //NON-NLS + } + } + } catch (NoCurrentCaseException | TskCoreException ex) { + LOGGER.log(Level.SEVERE, String.format("Error creating %s analysis result for content '%s' (job ID=%d)", analysisResultType, content, ingestJobId), ex); // NON-NLS + } + return Optional.ofNullable(analysisResult); + } + + /** + * Posts a message to the ingest messages inbox to notify the user that a + * notable content has been found, i.e., a previously notable analysis + * result has been created. + * + * @param content The notable content. + * @param otherCases The other cases in which the content was marked as + * notable. + * @param corrAttrValue The correlation attribute value used to identify + * the content, used by the ingest inbox as a unique + * key for message grouping. + * @param analysisResult The previously notable analysis result. + */ + @NbBundle.Messages({ + "# {0} - Name of item that is Notable", + "CentralRepoIngestModule_notable_inbox_msg_subject=Notable: {0}" + }) + private static void postNotableMessage(Content content, Set<String> otherCases, String corrAttrValue, AnalysisResult analysisResult) { + String msgSubject = null; + String msgDetails = null; + String msgKey = corrAttrValue; + if (content instanceof AbstractFile) { + AbstractFile file = (AbstractFile) content; + msgSubject = Bundle.CentralRepoIngestModule_notable_inbox_msg_subject(file.getName()); + msgDetails = makeNotableFileMessage(file, otherCases); + } else if (content instanceof DataArtifact) { + DataArtifact artifact = (DataArtifact) content; + msgSubject = Bundle.CentralRepoIngestModule_notable_inbox_msg_subject(artifact.getDisplayName()); + msgDetails = makeNotableDataArtifactMessage(artifact, corrAttrValue, otherCases); + } else { + LOGGER.log(Level.SEVERE, "Unsupported Content, cannot post ingest inbox message"); + } + if (msgSubject != null && msgDetails != null) { + IngestServices.getInstance().postMessage( + IngestMessage.createDataMessage( + MODULE_NAME, + msgSubject, + msgDetails, + msgKey, + analysisResult)); + } + } + + /** + * Makes an ingest inbox message for a notable file. Uses similar HTML + * markup as is used for this purpose by the hash lookup ingest module. + * + * @param file The notable file. + * @param otherCases The cases other than the current case in which the file + * was marked as notable. + * + * @return The message.
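+     *         For example, a file previously tagged notable in cases "Case A" and "Case B" yields a three-row HTML table: file name, MD5 hash, and previous cases ("Case A,Case B").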
+ */ + @NbBundle.Messages({ + "CentralRepoIngestModule_filename_inbox_msg_header=File Name", + "CentralRepoIngestModule_md5Hash_inbox_msg_header=MD5 Hash", + "CentralRepoIngestModule_prev_cases_inbox_msg_header=Previous Cases" + }) + private static String makeNotableFileMessage(AbstractFile file, Set otherCases) { + StringBuilder message = new StringBuilder(1024); + message.append(""); //NON-NLS + addTableRowMarkup(message, Bundle.CentralRepoIngestModule_filename_inbox_msg_header(), file.getName()); + addTableRowMarkup(message, Bundle.CentralRepoIngestModule_md5Hash_inbox_msg_header(), file.getMd5Hash()); + addTableRowMarkup(message, Bundle.CentralRepoIngestModule_prev_cases_inbox_msg_header(), otherCases.stream().collect(Collectors.joining(","))); + return message.toString(); + } + + /** + * Makes an ingest inbox message for a notable data artifact. Uses similar + * HTML markup as is used for this purpose by the hash lookup ingest module. + * + * @param artifact The data artifact + * @param corrAttrValue The notable attribute (correlation attribute value). + * @param otherCases The cases other than the current case in which the + * artifact was marked as nmotable. + * + * @return The message. + */ + @NbBundle.Messages({ + "CentralRepoIngestModule_artifact_type_inbox_msg_header=Artifact Type", + "CentralRepoIngestModule_notable_attr_inbox_msg_header=Notable Attribute" + }) + private static String makeNotableDataArtifactMessage(DataArtifact artifact, String corrAttrValue, Set otherCases) { + StringBuilder message = new StringBuilder(1024); + message.append("
"); //NON-NLS + addTableRowMarkup(message, Bundle.CentralRepoIngestModule_artifact_type_inbox_msg_header(), artifact.getDisplayName()); + addTableRowMarkup(message, Bundle.CentralRepoIngestModule_notable_attr_inbox_msg_header(), corrAttrValue); + addTableRowMarkup(message, Bundle.CentralRepoIngestModule_prev_cases_inbox_msg_header(), otherCases.stream().collect(Collectors.joining(","))); + message.append("
"); //NON-NLS + return message.toString(); + } + + /** + * Adds a table row to a notable item message (HTML). + * + * @param message The string builder for the message. + * @param headerText The table row header text. + * @param cellText The table row cell text. + */ + private static void addTableRowMarkup(StringBuilder message, String headerText, String cellText) { + message.append(""); //NON-NLS + message.append("").append(headerText).append(""); //NON-NLS + message.append("").append(cellText).append(""); //NON-NLS + message.append(""); //NON-NLS + } + + /* + * Prevents instatiation of this utility class. + */ + private CentralRepoIngestModuleUtils() { + } + +} diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/IngestSettings.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/IngestSettings.java index e55f09882e..027403490b 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/IngestSettings.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/IngestSettings.java @@ -26,7 +26,11 @@ import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings; final class IngestSettings implements IngestModuleIngestJobSettings { private static final long serialVersionUID = 1L; - + static final boolean DEFAULT_FLAG_TAGGED_NOTABLE_ITEMS = false; + static final boolean DEFAULT_FLAG_PREVIOUS_DEVICES = false; + static final boolean DEFAULT_FLAG_UNIQUE_DEVICES = false; + static final boolean DEFAULT_CREATE_CR_PROPERTIES = true; + private final boolean flagTaggedNotableItems; private final boolean flagPreviousDevices; private final boolean createCorrelationProperties; @@ -36,10 +40,10 @@ final class IngestSettings implements IngestModuleIngestJobSettings { * Instantiate the ingest job settings with default values. 
*/ IngestSettings() { - this.flagTaggedNotableItems = CentralRepoIngestModule.DEFAULT_FLAG_TAGGED_NOTABLE_ITEMS; - this.flagPreviousDevices = CentralRepoIngestModule.DEFAULT_FLAG_PREVIOUS_DEVICES; - this.createCorrelationProperties = CentralRepoIngestModule.DEFAULT_CREATE_CR_PROPERTIES; - this.flagUniqueArtifacts = CentralRepoIngestModule.DEFAULT_FLAG_UNIQUE_DEVICES; + this.flagTaggedNotableItems = DEFAULT_FLAG_TAGGED_NOTABLE_ITEMS; + this.flagPreviousDevices = DEFAULT_FLAG_PREVIOUS_DEVICES; + this.createCorrelationProperties = DEFAULT_CREATE_CR_PROPERTIES; + this.flagUniqueArtifacts = DEFAULT_FLAG_UNIQUE_DEVICES; } /** diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties b/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties index 5ee772af47..5b391d9d75 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties @@ -247,3 +247,11 @@ DataResultPanel.pagesLabel.text=Pages: DataResultPanel.pageNumLabel.text= DataResultPanel.pageNextButton.text= DataResultPanel.pagePrevButton.text= + +DataResultViewerThumbnail.pageLabel.text=Page: +DataResultViewerThumbnail.pagesLabel.text=Pages: +DataResultViewerThumbnail.pagePrevButton.text= +DataResultViewerThumbnail.pageNextButton.text= +DataResultViewerThumbnail.pageNumLabel.text=- +DataResultViewerThumbnail.goToPageLabel.text=Go to Page: +DataResultViewerThumbnail.goToPageField.text= diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties-MERGED index 07a42f5e19..cde18d3900 100755 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties-MERGED @@ -72,9 +72,9 @@ DataContentViewerHex.totalPageLabel.text_1=100 DataContentViewerHex.pageLabel2.text=Page # Product Information panel -LBL_Description=
\n Product Version: {0} ({9})
Sleuth Kit Version: {7}
Netbeans RCP Build: {8}
Java: {1}; {2}
System: {3}; {4}; {5}
Userdir: {6}
+LBL_Description=
\n Product Version: {0} ({9})
Sleuth Kit Version: {7}
Netbeans RCP Build: {8}
Java: {1}; {2}
System: {3}; {4}; {5}
Userdir: {6}
Format_OperatingSystem_Value={0} version {1} running on {2} -LBL_Copyright=
Autopsy™ is a digital forensics platform based on The Sleuth Kit™ and other tools.
Copyright © 2003-2020.
+LBL_Copyright=
Autopsy™ is a digital forensics platform based on The Sleuth Kit™ and other tools.
Copyright © 2003-2020.
SortChooser.dialogTitle=Choose Sort Criteria ThumbnailViewChildren.progress.cancelling=(Cancelling) # {0} - file name @@ -97,7 +97,7 @@ DataContentViewerHex.goToPageTextField.text= DataContentViewerHex.goToPageLabel.text=Go to Page: DataResultViewerThumbnail.imagesLabel.text=Images: DataResultViewerThumbnail.imagesRangeLabel.text=- -DataResultViewerThumbnail.filePathLabel.text=\ \ \ +DataResultViewerThumbnail.filePathLabel.text=\ AdvancedConfigurationDialog.cancelButton.text=Cancel DataArtifactContentViewer.waitText=Retrieving and preparing data, please wait... DataArtifactContentViewer.errorText=Error retrieving result @@ -311,3 +311,11 @@ DataResultPanel.pagesLabel.text=Pages: DataResultPanel.pageNumLabel.text= DataResultPanel.pageNextButton.text= DataResultPanel.pagePrevButton.text= + +DataResultViewerThumbnail.pageLabel.text=Page: +DataResultViewerThumbnail.pagesLabel.text=Pages: +DataResultViewerThumbnail.pagePrevButton.text= +DataResultViewerThumbnail.pageNextButton.text= +DataResultViewerThumbnail.pageNumLabel.text=- +DataResultViewerThumbnail.goToPageLabel.text=Go to Page: +DataResultViewerThumbnail.goToPageField.text= diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java index 84035a1624..2f37450d2f 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java @@ -62,11 +62,14 @@ import org.sleuthkit.autopsy.datamodel.BaseChildFactory.PageCountChangeEvent; import org.sleuthkit.autopsy.datamodel.BaseChildFactory.PageSizeChangeEvent; import org.sleuthkit.autopsy.datamodel.NodeSelectionInfo; import org.sleuthkit.autopsy.ingest.IngestManager; +import org.sleuthkit.autopsy.mainui.datamodel.CommAccountsSearchParams; import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultDAO.AnalysisResultFetcher; import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultDAO.AnalysisResultSetFetcher; import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultDAO.KeywordHitResultFetcher; import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSetSearchParam; +import org.sleuthkit.autopsy.mainui.datamodel.CommAccountsDAO.CommAccountFetcher; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOAggregateEvent; import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactDAO.DataArtifactFetcher; import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.FileSystemContentSearchParam; @@ -76,8 +79,8 @@ import org.sleuthkit.autopsy.mainui.datamodel.FileSystemHostSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.FileTypeExtensionsSearchParams; import org.sleuthkit.autopsy.mainui.datamodel.FileTypeMimeSearchParams; import org.sleuthkit.autopsy.mainui.datamodel.FileTypeSizeSearchParams; -import org.sleuthkit.autopsy.mainui.datamodel.HashHitSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.KeywordHitSearchParam; +import org.sleuthkit.autopsy.mainui.datamodel.MainDAO; import org.sleuthkit.autopsy.mainui.datamodel.OsAccountsDAO.AccountFetcher; import org.sleuthkit.autopsy.mainui.datamodel.OsAccountsSearchParams; import org.sleuthkit.autopsy.mainui.nodes.SearchResultRootNode; @@ -87,7 +90,6 @@ import org.sleuthkit.autopsy.mainui.datamodel.TagsSearchParams; import org.sleuthkit.autopsy.mainui.datamodel.ViewsDAO.FileTypeExtFetcher; import 
org.sleuthkit.autopsy.mainui.datamodel.ViewsDAO.FileTypeMimeFetcher; import org.sleuthkit.autopsy.mainui.datamodel.ViewsDAO.FileTypeSizeFetcher; -import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.autopsy.mainui.nodes.SearchManager; /** @@ -140,21 +142,9 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C private final PreferenceChangeListener pageSizeListener = (PreferenceChangeEvent evt) -> { if (evt.getKey().equals(UserPreferences.RESULTS_TABLE_PAGE_SIZE)) { - int newPageSize = UserPreferences.getResultsTablePageSize(); - nodeNameToPageCountListenerMap.values().forEach((ps) -> { ps.postPageSizeChangeEvent(); }); - - try { - if (this.searchResultManager != null) { - DAOFetcher<?> previousFetcher = this.searchResultManager.getDaoFetcher(); - this.searchResultManager = new SearchManager(previousFetcher, newPageSize); - displaySearchResults(this.searchResultManager.getResults(), false); - } - } catch (IllegalArgumentException | ExecutionException ex) { - logger.log(Level.WARNING, "There was an error while updating page size", ex); - } } }; @@ -162,10 +152,11 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C private final PropertyChangeListener caseEventListener = evt -> { String evtName = evt.getPropertyName(); - if (Case.Events.DATA_SOURCE_ADDED.toString().equals(evtName)) { - refreshSearchResultChildren(); - } else if (Case.Events.CURRENT_CASE.toString().equals(evtName) && evt.getNewValue() == null) { - nodeNameToPageCountListenerMap.clear(); + if (Case.Events.CURRENT_CASE.toString().equals(evtName)) { + searchResultManager = null; + if (evt.getNewValue() == null) { + nodeNameToPageCountListenerMap.clear(); + } } }; @@ -176,27 +167,19 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C IngestManager.IngestModuleEvent.CONTENT_CHANGED, IngestManager.IngestModuleEvent.DATA_ADDED); - private final PropertyChangeListener ingestModuleListener = evt -> { - if (this.searchResultManager != null && this.searchResultManager.isRefreshRequired(evt)) { - refreshSearchResultChildren(); + private final MainDAO mainDAO = MainDAO.getInstance(); + + private final PropertyChangeListener DAOListener = evt -> { + SearchManager manager = this.searchResultManager; + if (manager != null && evt != null && evt.getNewValue() instanceof DAOAggregateEvent) { + DAOAggregateEvent daoAggrEvt = (DAOAggregateEvent) evt.getNewValue(); + if (daoAggrEvt.getEvents().stream().anyMatch((daoEvt) -> manager.isRefreshRequired(daoEvt))) { + refreshSearchResultChildren(); + } } }; - private final PropertyChangeListener weakIngestModuleListener = WeakListeners.propertyChange(ingestModuleListener, null); - - private static final Set<IngestManager.IngestJobEvent> INGEST_JOB_EVENTS = EnumSet.of( - IngestManager.IngestJobEvent.COMPLETED, - IngestManager.IngestJobEvent.CANCELLED); - - private final PropertyChangeListener ingestJobListener = (PropertyChangeEvent evt) -> { - String eventType = evt.getPropertyName(); - if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString()) - || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) { - refreshSearchResultChildren(); - } - }; - - private final PropertyChangeListener weakIngestJobListener = WeakListeners.propertyChange(ingestJobListener, null); + private final PropertyChangeListener weakDAOListener = WeakListeners.propertyChange(DAOListener, mainDAO); /** * Creates and opens a Swing JPanel with a JTabbedPane child component that
extends javax.swing.JPanel implements DataResult, C private void initListeners() { UserPreferences.addChangeListener(this.pageSizeListener); Case.addEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, this.weakCaseEventListener); - IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS, this.weakIngestModuleListener); - IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS, weakIngestJobListener); + this.mainDAO.getResultEventsManager().addPropertyChangeListener(this.weakDAOListener); + IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS, this.weakDAOListener); } /** @@ -471,8 +454,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C private void closeListeners() { UserPreferences.removeChangeListener(this.pageSizeListener); Case.removeEventTypeSubscriber(EnumSet.of(Case.Events.CURRENT_CASE), this.weakCaseEventListener); - IngestManager.getInstance().removeIngestModuleEventListener(INGEST_MODULE_EVENTS, this.weakIngestModuleListener); - IngestManager.getInstance().removeIngestJobEventListener(INGEST_JOB_EVENTS, weakIngestJobListener); + this.mainDAO.getResultEventsManager().removePropertyChangeListener(this.weakDAOListener); } /** @@ -512,7 +494,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C // if search result root node, it's fine; otherwise, wrap in result // viewer filter node to make sure there are no grandchildren - this.currentRootNode = (rootNode instanceof SearchResultRootNode) + this.currentRootNode = (rootNode instanceof SearchResultRootNode) ? rootNode : new ResultViewerFilterParentNode(rootNode); @@ -1187,6 +1169,27 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C } } + /** + * Displays the results of querying the DAO for accounts matching the + * given search parameters. + * + * @param accountParams The account search parameters. + */ + void displayAccounts(CommAccountsSearchParams accountParams) { + try { + this.searchResultManager = new SearchManager(new CommAccountFetcher(accountParams), getPageSize()); + SearchResultsDTO results = searchResultManager.getResults(); + displaySearchResults(results, true); + } catch (ExecutionException ex) { + logger.log(Level.WARNING, + MessageFormat.format("There was an error displaying search results for [artifact type: {0}, data source id: {1}, account type: {2}]", + accountParams.getType(), + accountParams.getDataSourceId() == null ? "" : accountParams.getDataSourceId(), + accountParams.getType()), + ex); + } + } + void displayAnalysisResult(AnalysisResultSearchParam analysisResultParams) { try { this.searchResultManager = new SearchManager(new AnalysisResultFetcher(analysisResultParams), getPageSize()); @@ -1270,7 +1273,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C */ void displayFileSizes(FileTypeSizeSearchParams fileSizeKey) { try { - this.searchResultManager = new SearchManager(new FileTypeSizeFetcher(fileSizeKey), getPageSize()); + this.searchResultManager = new SearchManager(MainDAO.getInstance().getViewsDAO().new FileTypeSizeFetcher(fileSizeKey), getPageSize()); SearchResultsDTO results = searchResultManager.getResults(); displaySearchResults(results, true); } catch (ExecutionException | IllegalArgumentException ex) { @@ -1321,7 +1324,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C ex); } } - + /** * Displays results of querying the DAO for the given search parameters * query.
@@ -1361,7 +1364,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C ex); } } - + /** * Displays results of querying the DAO for the given search parameters * query. @@ -1420,7 +1423,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C */ private void refreshSearchResultChildren() { try { - refreshSearchResultChildren(this.searchResultManager.getRefreshedData()); + refreshSearchResultChildren(this.searchResultManager.getResults()); } catch (ExecutionException | IllegalArgumentException ex) { logger.log(Level.WARNING, "There was an error refreshing data: ", ex); } @@ -1513,6 +1516,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C /** * Main constructor. + * * @param original The original node to wrap. */ ResultViewerFilterParentNode(Node original) { diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultTopComponent.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultTopComponent.java index 2be5daea6d..1994b158cd 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultTopComponent.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultTopComponent.java @@ -46,6 +46,7 @@ import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.FileSystemContentSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.FileSystemHostSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSetSearchParam; +import org.sleuthkit.autopsy.mainui.datamodel.CommAccountsSearchParams; import org.sleuthkit.autopsy.mainui.datamodel.FileTypeMimeSearchParams; import org.sleuthkit.autopsy.mainui.datamodel.FileTypeSizeSearchParams; import org.sleuthkit.autopsy.mainui.datamodel.KeywordHitSearchParam; @@ -481,6 +482,16 @@ public final class DataResultTopComponent extends TopComponent implements DataRe public void displayOsAccounts(OsAccountsSearchParams osAccountParams) { dataResultPanel.displayOsAccount(osAccountParams); } + + /** + * Displays the results of querying the DAO for accounts matching the + * given search parameters. + * + * @param accountParams The account search parameters. + */ + public void displayAccounts(CommAccountsSearchParams accountParams) { + dataResultPanel.displayAccounts(accountParams); + } @Override public void setTitle(String title) { diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerTable.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerTable.java index f844adccb8..dffd0631ce 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerTable.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerTable.java @@ -528,13 +528,15 @@ public class DataResultViewerTable extends AbstractDataResultViewer { if (rootNode == null || propertiesMap.isEmpty()) { return; } - if (rootNode instanceof TableFilterNode) { - TableFilterNode tfn = (TableFilterNode) rootNode; + if (rootNode instanceof TableFilterNode || searchResults != null) { + TableFilterNode tfn = searchResults == null ? (TableFilterNode) rootNode : null; final Preferences preferences = NbPreferences.forModule(DataResultViewerTable.class); final ETableColumnModel columnModel = (ETableColumnModel) outline.getColumnModel(); for (Map.Entry<String, ETableColumn> entry : columnMap.entrySet()) { String columnName = entry.getKey(); - final String columnHiddenKey = ResultViewerPersistence.getColumnHiddenKey(tfn, columnName); + final String columnHiddenKey = + tfn != null ?
ResultViewerPersistence.getColumnHiddenKey(tfn, columnName) : + ResultViewerPersistence.getColumnHiddenKey(searchResults, columnName); final TableColumn column = entry.getValue(); boolean columnHidden = columnModel.isColumnHidden(column); if (columnHidden) { @@ -554,12 +556,14 @@ public class DataResultViewerTable extends AbstractDataResultViewer { if (rootNode == null || propertiesMap.isEmpty()) { return; } - if (rootNode instanceof TableFilterNode) { - TableFilterNode tfn = (TableFilterNode) rootNode; + if (rootNode instanceof TableFilterNode || searchResults != null) { + TableFilterNode tfn = searchResults == null ? (TableFilterNode) rootNode : null; final Preferences preferences = NbPreferences.forModule(DataResultViewerTable.class); // Store the current order of the columns into settings for (Map.Entry<Integer, Property<?>> entry : propertiesMap.entrySet()) { - preferences.putInt(ResultViewerPersistence.getColumnPositionKey(tfn, entry.getValue().getName()), entry.getKey()); + preferences.putInt(tfn != null ? + ResultViewerPersistence.getColumnPositionKey(tfn, entry.getValue().getName()) : + ResultViewerPersistence.getColumnPositionKey(searchResults, entry.getValue().getName()), entry.getKey()); } } } @@ -571,16 +575,20 @@ public class DataResultViewerTable extends AbstractDataResultViewer { if (rootNode == null || propertiesMap.isEmpty()) { return; } - if (rootNode instanceof TableFilterNode) { - final TableFilterNode tfn = ((TableFilterNode) rootNode); + if (rootNode instanceof TableFilterNode || searchResults != null) { + final TableFilterNode tfn = searchResults == null ? ((TableFilterNode) rootNode) : null; final Preferences preferences = NbPreferences.forModule(DataResultViewerTable.class); ETableColumnModel columnModel = (ETableColumnModel) outline.getColumnModel(); for (Map.Entry<String, ETableColumn> entry : columnMap.entrySet()) { ETableColumn etc = entry.getValue(); String columnName = entry.getKey(); //store sort rank and order - final String columnSortOrderKey = ResultViewerPersistence.getColumnSortOrderKey(tfn, columnName); - final String columnSortRankKey = ResultViewerPersistence.getColumnSortRankKey(tfn, columnName); + final String columnSortOrderKey = + searchResults == null ? ResultViewerPersistence.getColumnSortOrderKey(tfn, columnName) : + ResultViewerPersistence.getColumnSortOrderKey(searchResults, columnName); + final String columnSortRankKey = + searchResults == null ? ResultViewerPersistence.getColumnSortRankKey(tfn, columnName) : + ResultViewerPersistence.getColumnSortRankKey(searchResults, columnName); if (etc.isSorted() && (columnModel.isColumnHidden(etc) == false)) { preferences.putBoolean(columnSortOrderKey, etc.isAscending()); preferences.putInt(columnSortRankKey, etc.getSortRank()); @@ -590,7 +598,7 @@ public class DataResultViewerTable extends AbstractDataResultViewer { preferences.remove(columnSortRankKey); } } - } + } } /** @@ -603,17 +611,23 @@ public class DataResultViewerTable extends AbstractDataResultViewer { if (rootNode == null || propertiesMap.isEmpty()) { return; } - if (rootNode instanceof TableFilterNode) { - final TableFilterNode tfn = (TableFilterNode) rootNode; + if (rootNode instanceof TableFilterNode || searchResults != null) { + final TableFilterNode tfn = (searchResults == null ?
(TableFilterNode) rootNode : null); final Preferences preferences = NbPreferences.forModule(DataResultViewerTable.class); //organize property sorting information, sorted by rank TreeSet<ColumnSortInfo> sortInfos = new TreeSet<>(Comparator.comparing(ColumnSortInfo::getRank)); propertiesMap.entrySet().stream().forEach(entry -> { final String propName = entry.getValue().getName(); //if the sort rank is undefined, it will be defaulted to 0 => unsorted. - Integer sortRank = preferences.getInt(ResultViewerPersistence.getColumnSortRankKey(tfn, propName), 0); + Integer sortRank = preferences.getInt( + tfn != null ? + ResultViewerPersistence.getColumnSortRankKey(tfn, propName) : + ResultViewerPersistence.getColumnSortRankKey(searchResults, propName), 0); //default to true => ascending - Boolean sortOrder = preferences.getBoolean(ResultViewerPersistence.getColumnSortOrderKey(tfn, propName), true); + Boolean sortOrder = preferences.getBoolean( + tfn != null ? + ResultViewerPersistence.getColumnSortOrderKey(tfn, propName) : + ResultViewerPersistence.getColumnSortOrderKey(searchResults, propName), true); sortInfos.add(new ColumnSortInfo(entry.getKey(), sortRank, sortOrder)); }); //apply sort information in rank order. @@ -629,13 +643,16 @@ public class DataResultViewerTable extends AbstractDataResultViewer { if (rootNode == null || propertiesMap.isEmpty()) { return; } - if (rootNode instanceof TableFilterNode) { + if (rootNode instanceof TableFilterNode || searchResults != null) { final Preferences preferences = NbPreferences.forModule(DataResultViewerTable.class); - final TableFilterNode tfn = ((TableFilterNode) rootNode); + final TableFilterNode tfn = (searchResults == null ? ((TableFilterNode) rootNode) : null); ETableColumnModel columnModel = (ETableColumnModel) outline.getColumnModel(); for (Map.Entry<Integer, Property<?>> entry : propertiesMap.entrySet()) { final String propName = entry.getValue().getName(); - boolean hidden = preferences.getBoolean(ResultViewerPersistence.getColumnHiddenKey(tfn, propName), false); + boolean hidden = preferences.getBoolean( + tfn != null ? + ResultViewerPersistence.getColumnHiddenKey(tfn, propName) : + ResultViewerPersistence.getColumnHiddenKey(searchResults, propName), false); + final TableColumn column = columnMap.get(propName); columnModel.setColumnHidden(column, hidden); } @@ -653,16 +670,7 @@ public class DataResultViewerTable extends AbstractDataResultViewer { private synchronized List<Node.Property<?>> loadColumnOrder() { if (searchResults != null) { - return searchResults.getColumns().stream() - .map(columnKey -> { - return new NodeProperty<>( - columnKey.getFieldName(), - columnKey.getDisplayName(), - columnKey.getDescription(), - "" - ); - }) - .collect(Collectors.toList()); + return loadColumnOrderForSearchResults(); } List<Node.Property<?>> props = ResultViewerPersistence.getAllChildProperties(rootNode, 100); @@ -705,6 +713,51 @@ public class DataResultViewerTable extends AbstractDataResultViewer { return new ArrayList<>(propertiesMap.values()); } + + private synchronized List<Node.Property<?>> loadColumnOrderForSearchResults() { + List<Node.Property<?>> props = searchResults.getColumns().stream() + .map(columnKey -> { + return new NodeProperty<>( + columnKey.getFieldName(), + columnKey.getDisplayName(), + columnKey.getDescription(), + "" + ); + }) + .collect(Collectors.toList()); + + propertiesMap.clear(); + + /* + * We load column index values into the properties map. If a property's + * index is outside the range of the number of properties or the index + * has already appeared as the position of another property, we put that + * property at the end.
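+     * For example (illustrative numbers): with three columns A, B, C whose saved positions are A=1, B=1, C=7, A keeps index 1, B collides with A and is appended at index 3 (the running offset), and C is out of range and is appended at index 4; the compaction step below then renumbers the keys to 0, 1, 2.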
+ */ + int offset = props.size(); + + final Preferences preferences = NbPreferences.forModule(DataResultViewerTable.class); + + for (Property<?> prop : props) { + Integer value = preferences.getInt(ResultViewerPersistence.getColumnPositionKey(searchResults, prop.getName()), -1); + if (value >= 0 && value < offset && !propertiesMap.containsKey(value)) { + propertiesMap.put(value, prop); + } else { + propertiesMap.put(offset, prop); + offset++; + } + } + + /* + * NOTE: it is possible to have "discontinuities" in the keys (i.e. + * column numbers) of the map. This happens when some of the columns had + * a previous setting, and other columns did not. We need to make the + * keys 0-indexed and continuous. + */ + compactPropertiesMap(); + + return new ArrayList<>(propertiesMap.values()); + } /** * Makes properties map 0-indexed and re-arranges elements to make sure the diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerThumbnail.form b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerThumbnail.form index c8bd1c3d78..798b0b51bd 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerThumbnail.form +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerThumbnail.form @@ -27,6 +27,200 @@ + [~200 added lines of NetBeans GUI builder form XML for the new paging controls (page number pane, prev/next buttons, go-to-page field, image range labels); the XML markup was stripped in this rendering] diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerThumbnail.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerThumbnail.java index 6c5a23ad67..1de09c893c 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerThumbnail.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerThumbnail.java @@ -27,9 +27,9 @@ import java.util.List; import java.util.Map; import java.util.function.Function; import java.util.logging.Level; -import java.util.prefs.PreferenceChangeListener; import java.util.prefs.Preferences; import java.util.stream.Collectors; +import javax.swing.JOptionPane; import javax.swing.ListSelectionModel; import javax.swing.SortOrder; import javax.swing.SwingUtilities; @@ -42,10 +42,13 @@ import org.openide.explorer.ExplorerManager; import org.openide.nodes.AbstractNode; import org.openide.nodes.Children; import org.openide.nodes.Node; +import org.openide.nodes.NodeEvent; +import org.openide.nodes.NodeListener; +import org.openide.nodes.NodeMemberEvent; +import org.openide.nodes.NodeReorderEvent; import org.openide.util.NbBundle; import org.openide.util.NbPreferences; import org.openide.util.lookup.ServiceProvider; -import org.sleuthkit.autopsy.core.UserPreferences; import org.sleuthkit.autopsy.corecomponentinterfaces.DataResultViewer; import static org.sleuthkit.autopsy.corecomponents.Bundle.*; import org.sleuthkit.autopsy.corecomponents.ResultViewerPersistence.SortCriterion; @@ -73,9 +76,13 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { private static final long serialVersionUID = 1L; private static final Logger logger = Logger.getLogger(DataResultViewerThumbnail.class.getName()); + private final PageUpdater pageUpdater = new PageUpdater(); private TableFilterNode rootNode; private ThumbnailViewChildren rootNodeChildren; private NodeSelectionListener
selectionListener; + private int currentPage; + private int totalPages; + private int currentPageImages; private int thumbSize = ImageUtils.ICON_SIZE_MEDIUM; /** @@ -112,7 +119,10 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { Bundle.DataResultViewerThumbnail_thumbnailSizeComboBox_medium(), Bundle.DataResultViewerThumbnail_thumbnailSizeComboBox_large()})); thumbnailSizeComboBox.setSelectedIndex(1); - + currentPage = -1; + totalPages = 0; + currentPageImages = 0; + + // The GUI builder is using FlowLayout, therefore this change should have no // impact on the initially designed layout. This change will just affect // how the components are laid out as the size of the window changes. @@ -130,6 +140,20 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { java.awt.GridBagConstraints gridBagConstraints; buttonBarPanel = new javax.swing.JPanel(); + pagesPanel = new javax.swing.JPanel(); + pageNumberPane = new javax.swing.JPanel(); + pageLabel = new javax.swing.JLabel(); + pageNumLabel = new javax.swing.JLabel(); + pageButtonPanel = new javax.swing.JPanel(); + pagesLabel = new javax.swing.JLabel(); + pagePrevButton = new javax.swing.JButton(); + pageNextButton = new javax.swing.JButton(); + pageGotoPane = new javax.swing.JPanel(); + goToPageLabel = new javax.swing.JLabel(); + goToPageField = new javax.swing.JTextField(); + imagePane = new javax.swing.JPanel(); + imagesLabel = new javax.swing.JLabel(); + imagesRangeLabel = new javax.swing.JLabel(); thumbnailSizeComboBox = new javax.swing.JComboBox<>(); sortPane = new javax.swing.JPanel(); sortLabel = new javax.swing.JLabel(); @@ -141,6 +165,140 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { buttonBarPanel.setLayout(new java.awt.FlowLayout(java.awt.FlowLayout.LEFT)); + pagesPanel.setLayout(new java.awt.GridBagLayout()); + + pageNumberPane.setLayout(new java.awt.GridBagLayout()); + + pageLabel.setText(org.openide.util.NbBundle.getMessage(DataResultViewerThumbnail.class, "DataResultViewerThumbnail.pageLabel.text")); // NOI18N + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 0; + gridBagConstraints.fill = java.awt.GridBagConstraints.VERTICAL; + gridBagConstraints.anchor = java.awt.GridBagConstraints.FIRST_LINE_START; + gridBagConstraints.weighty = 1.0; + gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 9); + pageNumberPane.add(pageLabel, gridBagConstraints); + + pageNumLabel.setText(org.openide.util.NbBundle.getMessage(DataResultViewerThumbnail.class, "DataResultViewerThumbnail.pageNumLabel.text")); // NOI18N + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 1; + gridBagConstraints.gridy = 0; + gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH; + gridBagConstraints.anchor = java.awt.GridBagConstraints.FIRST_LINE_START; + gridBagConstraints.weightx = 1.0; + gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 15); + pageNumberPane.add(pageNumLabel, gridBagConstraints); + + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.fill = java.awt.GridBagConstraints.VERTICAL; + gridBagConstraints.anchor = java.awt.GridBagConstraints.FIRST_LINE_START; + pagesPanel.add(pageNumberPane, gridBagConstraints); + + buttonBarPanel.add(pagesPanel); + + pageButtonPanel.setLayout(new java.awt.GridBagLayout()); + + pagesLabel.setText(org.openide.util.NbBundle.getMessage(DataResultViewerThumbnail.class,
"DataResultViewerThumbnail.pagesLabel.text")); // NOI18N + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 0; + gridBagConstraints.fill = java.awt.GridBagConstraints.VERTICAL; + gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST; + gridBagConstraints.weighty = 1.0; + gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 9); + pageButtonPanel.add(pagesLabel, gridBagConstraints); + + pagePrevButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/corecomponents/btn_step_back.png"))); // NOI18N + pagePrevButton.setText(org.openide.util.NbBundle.getMessage(DataResultViewerThumbnail.class, "DataResultViewerThumbnail.pagePrevButton.text")); // NOI18N + pagePrevButton.setBorder(null); + pagePrevButton.setDisabledIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/corecomponents/btn_step_back_disabled.png"))); // NOI18N + pagePrevButton.setFocusable(false); + pagePrevButton.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER); + pagePrevButton.setMargin(new java.awt.Insets(0, 0, 0, 0)); + pagePrevButton.setRolloverIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/corecomponents/btn_step_back_hover.png"))); // NOI18N + pagePrevButton.setVerticalTextPosition(javax.swing.SwingConstants.BOTTOM); + pagePrevButton.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + pagePrevButtonActionPerformed(evt); + } + }); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 1; + gridBagConstraints.gridy = 0; + gridBagConstraints.gridheight = 2; + gridBagConstraints.anchor = java.awt.GridBagConstraints.FIRST_LINE_START; + pageButtonPanel.add(pagePrevButton, gridBagConstraints); + + pageNextButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/corecomponents/btn_step_forward.png"))); // NOI18N + pageNextButton.setText(org.openide.util.NbBundle.getMessage(DataResultViewerThumbnail.class, "DataResultViewerThumbnail.pageNextButton.text")); // NOI18N + pageNextButton.setBorder(null); + pageNextButton.setDisabledIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/corecomponents/btn_step_forward_disabled.png"))); // NOI18N + pageNextButton.setFocusable(false); + pageNextButton.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER); + pageNextButton.setMargin(new java.awt.Insets(0, 0, 0, 0)); + pageNextButton.setMaximumSize(new java.awt.Dimension(27, 23)); + pageNextButton.setMinimumSize(new java.awt.Dimension(27, 23)); + pageNextButton.setRolloverIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/corecomponents/btn_step_forward_hover.png"))); // NOI18N + pageNextButton.setVerticalTextPosition(javax.swing.SwingConstants.BOTTOM); + pageNextButton.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + pageNextButtonActionPerformed(evt); + } + }); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 2; + gridBagConstraints.gridy = 0; + gridBagConstraints.gridheight = 2; + gridBagConstraints.anchor = java.awt.GridBagConstraints.FIRST_LINE_START; + gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 15); + pageButtonPanel.add(pageNextButton, gridBagConstraints); + + buttonBarPanel.add(pageButtonPanel); + + pageGotoPane.setLayout(new java.awt.GridBagLayout()); 
+ + goToPageLabel.setText(org.openide.util.NbBundle.getMessage(DataResultViewerThumbnail.class, "DataResultViewerThumbnail.goToPageLabel.text")); // NOI18N + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 0; + gridBagConstraints.fill = java.awt.GridBagConstraints.VERTICAL; + gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST; + gridBagConstraints.weighty = 1.0; + gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 9); + pageGotoPane.add(goToPageLabel, gridBagConstraints); + + goToPageField.setText(org.openide.util.NbBundle.getMessage(DataResultViewerThumbnail.class, "DataResultViewerThumbnail.goToPageField.text")); // NOI18N + goToPageField.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + goToPageFieldActionPerformed(evt); + } + }); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 1; + gridBagConstraints.gridy = 0; + gridBagConstraints.gridheight = 2; + gridBagConstraints.ipadx = 75; + gridBagConstraints.anchor = java.awt.GridBagConstraints.FIRST_LINE_START; + gridBagConstraints.weightx = 1.0; + gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 15); + pageGotoPane.add(goToPageField, gridBagConstraints); + + buttonBarPanel.add(pageGotoPane); + + imagePane.setLayout(new java.awt.GridBagLayout()); + + imagesLabel.setText(org.openide.util.NbBundle.getMessage(DataResultViewerThumbnail.class, "DataResultViewerThumbnail.imagesLabel.text")); // NOI18N + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 9); + imagePane.add(imagesLabel, gridBagConstraints); + + imagesRangeLabel.setText(org.openide.util.NbBundle.getMessage(DataResultViewerThumbnail.class, "DataResultViewerThumbnail.imagesRangeLabel.text")); // NOI18N + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 15); + imagePane.add(imagesRangeLabel, gridBagConstraints); + + buttonBarPanel.add(imagePane); + thumbnailSizeComboBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { thumbnailSizeComboBoxActionPerformed(evt); @@ -181,6 +339,18 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { add(iconView, java.awt.BorderLayout.CENTER); }// //GEN-END:initComponents + private void pagePrevButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_pagePrevButtonActionPerformed + previousPage(); + }//GEN-LAST:event_pagePrevButtonActionPerformed + + private void pageNextButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_pageNextButtonActionPerformed + nextPage(); + }//GEN-LAST:event_pageNextButtonActionPerformed + + private void goToPageFieldActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_goToPageFieldActionPerformed + goToPage(goToPageField.getText()); + }//GEN-LAST:event_goToPageFieldActionPerformed + private void thumbnailSizeComboBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_thumbnailSizeComboBoxActionPerformed int newIconSize; switch (thumbnailSizeComboBox.getSelectedIndex()) { @@ -199,14 +369,14 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { if (thumbSize != newIconSize) { thumbSize = newIconSize; Node root = this.getExplorerManager().getRootContext(); - this.rootNodeChildren.setThumbsSize(thumbSize); + 
((ThumbnailViewChildren) root.getChildren()).setThumbsSize(thumbSize); // Temporarily set the explored context to the root, instead of a child node. // This is a workaround hack to convince org.openide.explorer.ExplorerManager to // update even though the new and old Node values are identical. This in turn // will cause the entire view to update completely. After this we // immediately set the node back to the current child by calling switchPage(). - this.getExplorerManager().setExploredContext(this.rootNode); + this.getExplorerManager().setExploredContext(root); switchPage(); } }//GEN-LAST:event_thumbnailSizeComboBoxActionPerformed @@ -253,7 +423,21 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JPanel buttonBarPanel; private javax.swing.JLabel filePathLabel; + private javax.swing.JTextField goToPageField; + private javax.swing.JLabel goToPageLabel; private org.openide.explorer.view.IconView iconView; + private javax.swing.JPanel imagePane; + private javax.swing.JLabel imagesLabel; + private javax.swing.JLabel imagesRangeLabel; + private javax.swing.JPanel pageButtonPanel; + private javax.swing.JPanel pageGotoPane; + private javax.swing.JLabel pageLabel; + private javax.swing.JButton pageNextButton; + private javax.swing.JLabel pageNumLabel; + private javax.swing.JPanel pageNumberPane; + private javax.swing.JButton pagePrevButton; + private javax.swing.JLabel pagesLabel; + private javax.swing.JPanel pagesPanel; private javax.swing.JButton sortButton; private javax.swing.JLabel sortLabel; private javax.swing.JPanel sortPane; @@ -265,7 +449,7 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { return (selectedNode != null); } - @Override + @Override public void setNode(Node givenNode) { setNode(givenNode, null); } @@ -273,7 +457,7 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { @Override public void setNode(Node givenNode, SearchResultsDTO searchResults) { // GVDTODO givenNode cannot be assumed to be a table filter node and search results needs to be captured. - + setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); if (selectionListener == null) { this.getExplorerManager().addPropertyChangeListener(new NodeSelectionListener()); @@ -289,19 +473,23 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { // case where the DataResultViewerThumbnail stands along from the // DataResultViewer. See DataResultViewer setNode for more information. if (givenNode != null && givenNode.getChildren().getNodesCount() > 0) { - + // GVDTODO this should be handled more elegantly - rootNode = (givenNode instanceof TableFilterNode) - ? (TableFilterNode) givenNode + rootNode = (givenNode instanceof TableFilterNode) + ? (TableFilterNode) givenNode : new TableFilterNode(givenNode, true); - + + /* * Wrap the given node in a ThumbnailViewChildren that will * produce ThumbnailPageNodes with ThumbnailViewNode children * from the child nodes of the given node. 
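 * (Each ThumbnailPageNode holds up to ThumbnailViewChildren.IMAGES_PER_PAGE thumbnails; the PageUpdater listener registered on the root below counts these page nodes to drive the new paging controls.)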
*/ - rootNodeChildren = new ThumbnailViewChildren(rootNode, thumbSize); - final Node root = new AbstractNode(Children.create(rootNodeChildren, true)); + rootNodeChildren = new ThumbnailViewChildren(givenNode, thumbSize); + final Node root = new AbstractNode(rootNodeChildren); + + pageUpdater.setRoot(root); + root.addNodeListener(pageUpdater); this.getExplorerManager().setRootContext(root); } else { rootNode = null; @@ -328,7 +516,9 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { @Override public void resetComponent() { super.resetComponent(); - setNode(null); + this.totalPages = 0; + this.currentPage = -1; + currentPageImages = 0; updateControls(); } @@ -339,15 +529,59 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { super.clearComponent(); } - private void switchPage() { + private void nextPage() { + if (currentPage < totalPages) { + currentPage++; + switchPage(); + } + } + + private void previousPage() { + if (currentPage > 1) { + currentPage--; + switchPage(); + } + } + + private void goToPage(String pageNumText) { + int newPage; + try { + newPage = Integer.parseInt(pageNumText); + } catch (NumberFormatException e) { + //ignore input + return; + } + + if (newPage > totalPages || newPage < 1) { + JOptionPane.showMessageDialog(this, + NbBundle.getMessage(this.getClass(), "DataResultViewerThumbnail.goToPageTextField.msgDlg", totalPages), + NbBundle.getMessage(this.getClass(), "DataResultViewerThumbnail.goToPageTextField.err"), + JOptionPane.WARNING_MESSAGE); + return; + } + + currentPage = newPage; + switchPage(); + } + + private void switchPage() { SwingUtilities.invokeLater(() -> { setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); try { + pagePrevButton.setEnabled(false); + pageNextButton.setEnabled(false); + goToPageField.setEnabled(false); ProgressHandle progress = ProgressHandle.createHandle( NbBundle.getMessage(this.getClass(), "DataResultViewerThumbnail.genThumbs")); progress.start(); progress.switchToIndeterminate(); - DataResultViewerThumbnail.this.rootNodeChildren.update(); + + ExplorerManager explorerManager = DataResultViewerThumbnail.this.getExplorerManager(); + Node root = explorerManager.getRootContext(); + Node pageNode = root.getChildren().getNodeAt(currentPage - 1); + explorerManager.setExploredContext(pageNode); + currentPageImages = pageNode.getChildren().getNodesCount(); + progress.finish(); } catch (Exception ex) { NotifyDescriptor d @@ -368,12 +602,26 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { "# {0} - sort criteria", "DataResultViewerThumbnail.sortLabel.textTemplate=Sorted by: {0}", "DataResultViewerThumbnail.sortLabel.text=Sorted by: ---"}) private void updateControls() { - if (rootNode != null && rootNode.getChildren().getNodesCount(true) > 0) { + if (totalPages == 0) { + pagePrevButton.setEnabled(false); + pageNextButton.setEnabled(false); + goToPageField.setEnabled(false); + pageNumLabel.setText(""); + imagesRangeLabel.setText(""); thumbnailSizeComboBox.setEnabled(false); sortButton.setEnabled(false); sortLabel.setText(DataResultViewerThumbnail_sortLabel_text()); } else { + pageNumLabel.setText(NbBundle.getMessage(this.getClass(), "DataResultViewerThumbnail.pageNumbers.curOfTotal", + Integer.toString(currentPage), Integer.toString(totalPages))); + final int imagesFrom = (currentPage - 1) * ThumbnailViewChildren.IMAGES_PER_PAGE + 1; + final int imagesTo = currentPageImages + (currentPage - 1) * ThumbnailViewChildren.IMAGES_PER_PAGE; + 
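// Worked example (illustrative, assuming IMAGES_PER_PAGE is 200): on page 3 of a 450-image result currentPageImages is 50, so imagesFrom = (3 - 1) * 200 + 1 = 401 and imagesTo = 50 + (3 - 1) * 200 = 450. +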
imagesRangeLabel.setText(imagesFrom + "-" + imagesTo); + + pageNextButton.setEnabled(!(currentPage == totalPages)); + pagePrevButton.setEnabled(!(currentPage == 1)); + goToPageField.setEnabled(totalPages > 1); sortButton.setEnabled(true); thumbnailSizeComboBox.setEnabled(true); if (rootNode != null) { @@ -388,6 +636,88 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { } } + /** + * Listens for root change updates and updates the paging controls + */ + private class PageUpdater implements NodeListener { + + private Node root; + + void setRoot(Node root) { + this.root = root; + } + + @Override + public void propertyChange(PropertyChangeEvent evt) { + } + + @Override + public void childrenAdded(NodeMemberEvent nme) { + totalPages = root.getChildren().getNodesCount(); + + if (totalPages == 0) { + currentPage = -1; + updateControls(); + return; + } + + if (currentPage == -1 || currentPage > totalPages) { + currentPage = 1; + } + + //force load the curPage node + final Node pageNode = root.getChildren().getNodeAt(currentPage - 1); + + //em.setSelectedNodes(new Node[]{pageNode}); + if (pageNode != null) { + pageNode.addNodeListener(new NodeListener() { + @Override + public void childrenAdded(NodeMemberEvent nme) { + currentPageImages = pageNode.getChildren().getNodesCount(); + updateControls(); + } + + @Override + public void childrenRemoved(NodeMemberEvent nme) { + currentPageImages = 0; + updateControls(); + } + + @Override + public void childrenReordered(NodeReorderEvent nre) { + } + + @Override + public void nodeDestroyed(NodeEvent ne) { + } + + @Override + public void propertyChange(PropertyChangeEvent evt) { + } + }); + + DataResultViewerThumbnail.this.getExplorerManager().setExploredContext(pageNode); + } + + updateControls(); + } + + @Override + public void childrenRemoved(NodeMemberEvent nme) { + totalPages = 0; + currentPage = -1; + updateControls(); + } + + @Override + public void childrenReordered(NodeReorderEvent nre) { + } + + @Override + public void nodeDestroyed(NodeEvent ne) { + } + } + private class NodeSelectionListener implements PropertyChangeListener { @Override @@ -417,5 +747,5 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { } } } - } + } } diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/ResultViewerPersistence.java b/Core/src/org/sleuthkit/autopsy/corecomponents/ResultViewerPersistence.java index a981b04112..645bfaadaa 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/ResultViewerPersistence.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/ResultViewerPersistence.java @@ -28,6 +28,7 @@ import javax.swing.SortOrder; import org.openide.nodes.Children; import org.openide.nodes.Node; import org.openide.util.NbPreferences; +import org.sleuthkit.autopsy.mainui.datamodel.SearchResultsDTO; final class ResultViewerPersistence { @@ -46,6 +47,10 @@ final class ResultViewerPersistence { static String getColumnPositionKey(TableFilterNode node, String propName) { return getColumnKeyBase(node, propName) + ".column"; } + + static String getColumnPositionKey(SearchResultsDTO searchResult, String propName) { + return getColumnKeyBase(searchResult, propName) + ".column"; + } /** * Gets a key for the given node and a property of its child nodes to store @@ -59,6 +64,10 @@ final class ResultViewerPersistence { static String getColumnSortOrderKey(TableFilterNode node, String propName) { return getColumnKeyBase(node, propName) + ".sortOrder"; } + + static String getColumnSortOrderKey(SearchResultsDTO 
searchResult, String propName) { + return getColumnKeyBase(searchResult, propName) + ".sortOrder"; + } /** * Gets a key for the given node and a property of its child nodes to store @@ -72,6 +81,10 @@ final class ResultViewerPersistence { static String getColumnSortRankKey(TableFilterNode node, String propName) { return getColumnKeyBase(node, propName) + ".sortRank"; } + + static String getColumnSortRankKey(SearchResultsDTO searchResult, String propName) { + return getColumnKeyBase(searchResult, propName) + ".sortRank"; + } /** * Gets a key for the given node and a property of its child nodes to store @@ -85,10 +98,18 @@ final class ResultViewerPersistence { static String getColumnHiddenKey(TableFilterNode node, String propName) { return getColumnKeyBase(node, propName) + ".hidden"; } + + static String getColumnHiddenKey(SearchResultsDTO searchResult, String propName) { + return getColumnKeyBase(searchResult, propName) + ".hidden"; + } private static String getColumnKeyBase(TableFilterNode node, String propName) { return stripNonAlphanumeric(node.getColumnOrderKey()) + "." + stripNonAlphanumeric(propName); } + + private static String getColumnKeyBase(SearchResultsDTO searchResult, String propName) { + return stripNonAlphanumeric(searchResult.getSignature()) + "." + stripNonAlphanumeric(propName); + } private static String stripNonAlphanumeric(String str) { return str.replaceAll("[^a-zA-Z0-9_]", ""); diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/ThumbnailViewChildren.java b/Core/src/org/sleuthkit/autopsy/corecomponents/ThumbnailViewChildren.java index 0e756133e1..5ded426270 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/ThumbnailViewChildren.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/ThumbnailViewChildren.java @@ -18,18 +18,16 @@ */ package org.sleuthkit.autopsy.corecomponents; +import com.google.common.collect.Lists; import com.google.common.util.concurrent.ThreadFactoryBuilder; import java.awt.Image; import java.awt.Toolkit; import java.lang.ref.SoftReference; import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; +import java.util.Collections; import java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; import java.util.List; -import java.util.Map; -import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.CancellationException; import java.util.concurrent.ExecutionException; @@ -38,16 +36,19 @@ import java.util.concurrent.Executors; import java.util.concurrent.FutureTask; import java.util.logging.Level; import java.util.stream.Collectors; +import java.util.stream.IntStream; import java.util.stream.Stream; import javax.swing.SwingUtilities; import javax.swing.Timer; import org.apache.commons.lang3.StringUtils; import org.netbeans.api.progress.ProgressHandle; import org.netbeans.api.progress.ProgressHandleFactory; -import org.openide.nodes.ChildFactory; +import org.openide.nodes.AbstractNode; +import org.openide.nodes.Children; import org.openide.nodes.FilterNode; import org.openide.nodes.Node; import org.openide.util.NbBundle; +import org.openide.util.lookup.Lookups; import org.sleuthkit.autopsy.corecomponents.ResultViewerPersistence.SortCriterion; import static org.sleuthkit.autopsy.corecomponents.ResultViewerPersistence.loadSortCriteria; import org.sleuthkit.autopsy.coreutils.ImageUtils; @@ -64,72 +65,61 @@ import org.sleuthkit.datamodel.Content; * Filter-node like class, but adds additional hierarchy (pages) as parents of * the filtered nodes. 
*/ -class ThumbnailViewChildren extends ChildFactory.Detachable { +class ThumbnailViewChildren extends Children.Keys { private static final Logger logger = Logger.getLogger(ThumbnailViewChildren.class.getName()); @NbBundle.Messages("ThumbnailViewChildren.progress.cancelling=(Cancelling)") private static final String CANCELLING_POSTIX = Bundle.ThumbnailViewChildren_progress_cancelling(); + static final int IMAGES_PER_PAGE = 200; private final ExecutorService executor = Executors.newFixedThreadPool(3, new ThreadFactoryBuilder().setNameFormat("Thumbnail-Loader-%d").build()); private final List tasks = new ArrayList<>(); private final Node parent; + private final List<List<Node>> pages = new ArrayList<>(); private int thumbSize; - private final Map nodeCache = new HashMap<>(); - - private final Object isSupportedLock = new Object(); - /** * The constructor * - * @param parent The node which is the parent of this children. + * @param parent The node which is the parent of this children. * @param thumbSize The hight and/or width of the thumbnails in pixels. */ ThumbnailViewChildren(Node parent, int thumbSize) { + super(true); //support lazy loading + this.parent = parent; this.thumbSize = thumbSize; } @Override - protected synchronized boolean createKeys(List toPopulate) { - List suppContent = Stream.of(parent.getChildren().getNodes()) - .filter(n -> isSupported(n)) - .sorted(getComparator()) - .collect(Collectors.toList()); + protected void addNotify() { + super.addNotify(); - List currNodeNames = suppContent.stream() - .map(nd -> nd.getName()) - .collect(Collectors.toList()); + /* + * TODO: When lazy loading of original nodes is fixed, we should be + * asking the datamodel for the children instead and not counting the + * children nodes (which might not be preloaded at this point). + */ + // get list of supported children sorted by persisted criteria + final List suppContent + = Stream.of(parent.getChildren().getNodes()) + .filter(ThumbnailViewChildren::isSupported) + .sorted(getComparator()) + .collect(Collectors.toList()); - // find set of keys that are no longer present with current createKeys call. - Set toRemove = new HashSet<>(nodeCache.keySet()); - currNodeNames.forEach((k) -> toRemove.remove(k)); + if (suppContent.isEmpty()) { + //if there are no images, there is nothing more to do + return; + } - // remove them from cache - toRemove.forEach((k) -> nodeCache.remove(k)); + //divide the supported content into buckets + pages.addAll(Lists.partition(suppContent, IMAGES_PER_PAGE)); - toPopulate.addAll(suppContent); - return true; - } - - @Override - protected Node createNodeForKey(Node key) { - ThumbnailViewNode retNode = new ThumbnailViewNode(key, this.thumbSize); - nodeCache.put(key.getName(), retNode); - return retNode; - } - - @Override - protected void removeNotify() { - super.removeNotify(); - nodeCache.clear(); - } - - void update() { - this.refresh(false); + //the keys are just the indices into the pages list.
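For reference, Guava's Lists.partition (used just above) returns consecutive fixed-size views of the source list, with a possibly shorter final bucket. A toy example, with strings standing in for nodes:

```java
import com.google.common.collect.Lists;
import java.util.Arrays;
import java.util.List;

public class PartitionDemo {
    public static void main(String[] args) {
        List<String> names = Arrays.asList("a.jpg", "b.jpg", "c.jpg", "d.jpg", "e.jpg");
        // With a page size of 2: [[a.jpg, b.jpg], [c.jpg, d.jpg], [e.jpg]]
        List<List<String>> pages = Lists.partition(names, 2);
        System.out.println(pages.size() + " pages: " + pages);
    }
}
```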
+ setKeys(IntStream.range(0, pages.size()).boxed().collect(Collectors.toList())); } /** @@ -214,15 +204,21 @@ class ThumbnailViewChildren extends ChildFactory.Detachable { return null; } - private boolean isSupported(Node node) { + @Override + protected void removeNotify() { + super.removeNotify(); + pages.clear(); + } + @Override + protected Node[] createNodes(Integer pageNum) { + return new Node[]{new ThumbnailPageNode(pageNum, pages.get(pageNum))}; + + } + + private static boolean isSupported(Node node) { if (node != null) { - Content content = null; - // this is to prevent dead-locking issue with simultaneous accesses. - synchronized (isSupportedLock) { - content = node.getLookup().lookup(AbstractFile.class); - } - + Content content = node.getLookup().lookup(AbstractFile.class); if (content != null) { return ImageUtils.thumbnailSupported(content); } @@ -232,9 +228,10 @@ public void setThumbsSize(int thumbSize) { this.thumbSize = thumbSize; - for (ThumbnailViewNode node : nodeCache.values()) { - node.setThumbSize(thumbSize); - + for (Node page : getNodes()) { + for (Node node : page.getChildren().getNodes()) { + ((ThumbnailViewNode) node).setThumbSize(thumbSize); + } } } @@ -252,7 +249,6 @@ return task; } else { return null; - } } @@ -277,7 +273,7 @@ * The constructor * * @param wrappedNode The original node that this Node wraps. - * @param thumbSize The hight and/or width of the thumbnail in pixels. + * @param thumbSize  The height and/or width of the thumbnail in pixels. */ private ThumbnailViewNode(Node wrappedNode, int thumbSize) { super(wrappedNode, FilterNode.Children.LEAF); @@ -384,4 +380,66 @@ } } } + + /** + * Node representing a page of thumbnails, a parent of image nodes, with a + * name showing the children range. + */ + private class ThumbnailPageNode extends AbstractNode { + + private ThumbnailPageNode(Integer pageNum, List childNodes) { + + super(new ThumbnailPageNodeChildren(childNodes), Lookups.singleton(pageNum)); + setName(Integer.toString(pageNum + 1)); + int from = 1 + (pageNum * IMAGES_PER_PAGE); + int to = from + ((ThumbnailPageNodeChildren) getChildren()).getChildCount() - 1; + setDisplayName(from + "-" + to); + + this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/Folder-icon.png"); //NON-NLS + } + } + + /** + * Children.Keys implementation which uses nodes as keys, and wraps them in + * ThumbnailViewNodes as the child nodes.
+ * + */ + private class ThumbnailPageNodeChildren extends Children.Keys { + + /* + * wrapped original nodes + */ + private List keyNodes = null; + + ThumbnailPageNodeChildren(List keyNodes) { + super(true); + this.keyNodes = keyNodes; + } + + @Override + protected void addNotify() { + super.addNotify(); + setKeys(keyNodes); + } + + @Override + protected void removeNotify() { + super.removeNotify(); + setKeys(Collections.emptyList()); + } + + int getChildCount() { + return keyNodes.size(); + } + + @Override + protected Node[] createNodes(Node wrapped) { + if (wrapped != null) { + final ThumbnailViewNode thumb = new ThumbnailViewNode(wrapped, thumbSize); + return new Node[]{thumb}; + } else { + return new Node[]{}; + } + } + } } diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/datamodel/Bundle.properties-MERGED index 40a7fc2c77..076ff00d47 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/datamodel/Bundle.properties-MERGED @@ -300,10 +300,10 @@ ImageNode.getActions.viewInNewWin.text=View in New Window ImageNode.createSheet.name.name=Name ImageNode.createSheet.name.displayName=Name ImageNode.createSheet.name.desc=no description -Installer.exception.tskVerStringNull.msg=Sleuth Kit JNI test call returned without error, but version string was null! -Installer.exception.taskVerStringBang.msg=Sleuth Kit JNI test call returned without error, but version string was ""! -Installer.tskLibErr.msg=Problem with Sleuth Kit JNI. Test call failed!\n\nDetails: {0} -Installer.tskLibErr.err=Fatal Error! +Installer.exception.tskVerStringNull.msg=Sleuth Kit JNI test call returned without error, but version string was null\! +Installer.exception.taskVerStringBang.msg=Sleuth Kit JNI test call returned without error, but version string was ""\! +Installer.tskLibErr.msg=Problem with Sleuth Kit JNI. Test call failed\!\n\nDetails: {0} +Installer.tskLibErr.err=Fatal Error\! InterestingHits.interestingItems.text=INTERESTING ITEMS InterestingHits.displayName.text=Interesting Items InterestingHits.createSheet.name.name=Name diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/DataSourceFilesNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/DataSourceFilesNode.java index b7e33860b9..872269cfbd 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/DataSourceFilesNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/DataSourceFilesNode.java @@ -34,6 +34,7 @@ import org.openide.util.lookup.Lookups; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.mainui.nodes.FileSystemFactory; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskDataException; @@ -69,7 +70,7 @@ public class DataSourceFilesNode extends DisplayableItemNode { } public DataSourceFilesNode(long dsObjId) { - super(Children.create(new DataSourcesNodeChildren(dsObjId), true), Lookups.singleton(NAME)); + super(Children.create(new FileSystemFactory.DataSourceFactory(dsObjId), true), Lookups.singleton(NAME)); displayName = (dsObjId > 0) ? 
NbBundle.getMessage(DataSourceFilesNode.class, "DataSourcesNode.group_by_datasource.name") : NAME; init(); } @@ -85,75 +86,6 @@ public class DataSourceFilesNode extends DisplayableItemNode { return getClass().getName(); } - /* - * Custom Keys implementation that listens for new data sources being added. - */ - public static class DataSourcesNodeChildren extends AbstractContentChildren { - - private static final Logger logger = Logger.getLogger(DataSourcesNodeChildren.class.getName()); - private final long datasourceObjId; - - List currentKeys; - - public DataSourcesNodeChildren() { - this(0); - } - - public DataSourcesNodeChildren(long dsObjId) { - super("ds_" + Long.toString(dsObjId)); - this.currentKeys = new ArrayList<>(); - this.datasourceObjId = dsObjId; - } - - private final PropertyChangeListener pcl = new PropertyChangeListener() { - @Override - public void propertyChange(PropertyChangeEvent evt) { - String eventType = evt.getPropertyName(); - if (eventType.equals(Case.Events.DATA_SOURCE_ADDED.toString())) { - refresh(true); - } - } - }; - - @Override - protected void onAdd() { - Case.addEventTypeSubscriber(EnumSet.of(Case.Events.DATA_SOURCE_ADDED), pcl); - } - - @Override - protected void onRemove() { - Case.removeEventTypeSubscriber(EnumSet.of(Case.Events.DATA_SOURCE_ADDED), pcl); - currentKeys.clear(); - } - - @Override - protected List makeKeys() { - try { - if (datasourceObjId == 0) { - currentKeys = Case.getCurrentCaseThrows().getDataSources(); - } else { - Content content = Case.getCurrentCaseThrows().getSleuthkitCase().getDataSource(datasourceObjId); - currentKeys = new ArrayList<>(Arrays.asList(content)); - } - - Collections.sort(currentKeys, new Comparator() { - @Override - public int compare(Content content1, Content content2) { - String content1Name = content1.getName().toLowerCase(); - String content2Name = content2.getName().toLowerCase(); - return content1Name.compareTo(content2Name); - } - - }); - - } catch (TskCoreException | NoCurrentCaseException | TskDataException ex) { - logger.log(Level.SEVERE, "Error getting data sources: {0}", ex.getMessage()); // NON-NLS - } - - return currentKeys; - } - } - @Override public boolean isLeafTypeNode() { return false; diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/DataSourceGroupingNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/DataSourceGroupingNode.java index e765f4ee1d..16280fee76 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/DataSourceGroupingNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/DataSourceGroupingNode.java @@ -48,7 +48,7 @@ class DataSourceGroupingNode extends DisplayableItemNode { super(Optional.ofNullable(createDSGroupingNodeChildren(dataSource)) .orElse(new RootContentChildren(Arrays.asList(Collections.EMPTY_LIST))), Lookups.singleton(dataSource)); - + if (dataSource instanceof Image) { Image image = (Image) dataSource; diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/HostNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/HostNode.java index f043463268..d26224f817 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/HostNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/HostNode.java @@ -29,7 +29,6 @@ import java.util.logging.Level; import java.util.stream.Collectors; import javax.swing.Action; import org.openide.nodes.ChildFactory; - import org.openide.nodes.Children; import org.openide.nodes.Node; import org.openide.nodes.Sheet; @@ -46,6 +45,7 @@ import org.sleuthkit.autopsy.datamodel.hosts.AssociatePersonsMenuAction; import 
org.sleuthkit.autopsy.datamodel.hosts.MergeHostMenuAction; import org.sleuthkit.autopsy.datamodel.hosts.RemoveParentPersonAction; import org.sleuthkit.autopsy.mainui.datamodel.FileSystemHostSearchParam; +import org.sleuthkit.autopsy.mainui.nodes.FileSystemFactory; import org.sleuthkit.autopsy.corecomponents.SelectionResponder; import org.sleuthkit.datamodel.DataSource; import org.sleuthkit.datamodel.Host; @@ -221,7 +221,7 @@ public class HostNode extends DisplayableItemNode implements SelectionResponder{ * @param hosts The HostDataSources key. */ HostNode(HostDataSources hosts) { - this(Children.create(new HostGroupingChildren(HOST_DATA_SOURCES, hosts.getHost()), true), hosts.getHost()); + this(Children.create(new FileSystemFactory(hosts.getHost()), true), hosts.getHost()); } /** diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/utils/IconsUtil.java b/Core/src/org/sleuthkit/autopsy/datamodel/utils/IconsUtil.java index ce3a6763d9..3d4247701c 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/utils/IconsUtil.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/utils/IconsUtil.java @@ -140,8 +140,11 @@ public final class IconsUtil { } else if (typeID == BlackboardArtifact.Type.TSK_KEYWORD_HIT.getTypeID()) { imageFile = "keyword_hits.png"; } else if (typeID == BlackboardArtifact.Type.TSK_INTERESTING_ARTIFACT_HIT.getTypeID() - || typeID == BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT.getTypeID()) { + || typeID == BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT.getTypeID() + || typeID == BlackboardArtifact.Type.TSK_INTERESTING_ITEM.getTypeID()) { imageFile = "interesting_item.png"; + } else if (typeID == BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID()) { + imageFile = "accounts.png"; } else { imageFile = "artifact-icon.png"; //NON-NLS } diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeFilterChildren.java b/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeFilterChildren.java index 19ebb3ea5a..ac7b67108e 100644 --- a/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeFilterChildren.java +++ b/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeFilterChildren.java @@ -43,6 +43,7 @@ import org.sleuthkit.autopsy.datamodel.LocalDirectoryNode; import org.sleuthkit.autopsy.datamodel.SlackFileNode; import org.sleuthkit.autopsy.datamodel.VirtualDirectoryNode; import org.sleuthkit.autopsy.datamodel.VolumeNode; +import org.sleuthkit.autopsy.mainui.nodes.FileSystemFactory; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import org.sleuthkit.datamodel.Content; @@ -102,7 +103,10 @@ class DirectoryTreeFilterChildren extends FilterNode.Children { return new Node[]{cloned}; } else if (origNode instanceof FileSize.FileSizeRootNode) { Node cloned = ((FileSize.FileSizeRootNode) origNode).clone(); - return new Node[]{cloned}; + return new Node[]{cloned}; + } else if (origNode instanceof FileSystemFactory.FileSystemTreeNode) { + Node cloned = ((FileSystemFactory.FileSystemTreeNode) origNode).clone(); + return new Node[]{cloned}; } else if (origNode == null || !(origNode instanceof DisplayableItemNode)) { return new Node[]{}; } diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/ExtractUnallocAction.java b/Core/src/org/sleuthkit/autopsy/directorytree/ExtractUnallocAction.java index 126ec5e737..872a8c9744 100644 --- a/Core/src/org/sleuthkit/autopsy/directorytree/ExtractUnallocAction.java +++ b/Core/src/org/sleuthkit/autopsy/directorytree/ExtractUnallocAction.java @@ -104,7 +104,7 @@ public final 
class ExtractUnallocAction extends AbstractAction { public ExtractUnallocAction(String title, Image image, Volume volume) { super(title); - this.volume = null; + this.volume = volume; this.image = image; chooserFactory = new JFileChooserFactory(CustomFileChooser.class); diff --git a/Core/src/org/sleuthkit/autopsy/ingest/FileIngestModule.java b/Core/src/org/sleuthkit/autopsy/ingest/FileIngestModule.java index f13a614e54..6fb9c352da 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/FileIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/FileIngestModule.java @@ -33,10 +33,11 @@ public interface FileIngestModule extends IngestModule { * IMPORTANT: In addition to returning ProcessResult.OK or * ProcessResult.ERROR, modules should log all errors using methods provided * by the org.sleuthkit.autopsy.coreutils.Logger class. Log messages should - * include the name and object ID of the data being processed. If an - * exception has been caught by the module, the exception should be sent to - * the Logger along with the log message so that a stack trace will appear - * in the application log. + * include the name and object ID of the data being processed and any other + * information that would be useful for debugging. If an exception has been + * caught by the module, the exception should be sent to the logger along + * with the log message so that a stack trace will appear in the application + * log. * * @param file The file to analyze. * diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java index 37e4b549ee..350096d626 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java @@ -170,10 +170,10 @@ public final class IngestJob { * Starts data source level analysis for this job if it is running in * streaming ingest mode. 
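The renamed method below is the tail end of the streaming hand-off: a data source processor pushes files through an IngestStream while analysis runs, and closing the stream signals that the data source itself is ready. A sketch of that flow; addFiles(...) is an assumption for illustration, while the close() behavior is shown verbatim in the IngestJobInputStream hunk further down.

```java
// Illustrative only. Assumes the IngestStream exposes an addFiles(...) method
// (an assumption); close() delegating to IngestJob.addStreamedDataSource() is
// what this patch implements in IngestJobInputStream.
void streamDataSource(IngestStream stream, List<Long> fileObjectIds) throws Exception {
    stream.addFiles(fileObjectIds); // file-level analysis can start as files arrive
    stream.close();                 // all files are in: triggers addStreamedDataSource()
}
```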
*/ - void processStreamingIngestDataSource() { + void addStreamedDataSource() { if (ingestMode == Mode.STREAMING) { if (ingestModuleExecutor != null) { - ingestModuleExecutor.startStreamingModeDataSourceAnalysis(); + ingestModuleExecutor.addStreamedDataSource(); } else { logger.log(Level.SEVERE, "Attempted to start data source analaysis with no ingest pipeline"); } } } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java index 7a93b15b22..47b4d9b601 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java @@ -33,6 +33,7 @@ import java.util.regex.Pattern; import java.util.stream.Stream; import javax.annotation.concurrent.GuardedBy; import javax.swing.JOptionPane; +import javax.swing.SwingUtilities; import org.netbeans.api.progress.ProgressHandle; import org.openide.util.Cancellable; import org.openide.util.NbBundle; @@ -41,6 +42,7 @@ import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.core.RuntimeProperties; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.NetworkUtils; +import org.sleuthkit.autopsy.coreutils.ThreadConfined; import org.sleuthkit.autopsy.ingest.IngestTasksScheduler.IngestJobTasksSnapshot; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Content; @@ -190,21 +192,26 @@ final class IngestJobExecutor { /* * If running in the NetBeans thick client application version of Autopsy, - * NetBeans progress bars are used to display ingest job progress in the - * lower right hand corner of the main application window. A layer of - * abstraction to allow alternate representations of progress could be used - * here, as it is in other places in the application, to better decouple - * this object from the application's presentation layer. + * NetBeans progress handles (i.e., progress bars) are used to display + * ingest job progress in the lower right hand corner of the main + * application window. + * + * A layer of abstraction to allow alternate representations of progress + * could be used here, as it is in other places in the application (see + * implementations and usage of the + * org.sleuthkit.autopsy.progress.ProgressIndicator interface), to better + * decouple this object from the application's presentation layer. */ private final boolean usingNetBeansGUI; - private final Object dataSourceIngestProgressLock = new Object(); + @ThreadConfined(type = ThreadConfined.ThreadType.AWT) private ProgressHandle dataSourceIngestProgressBar; - private final Object fileIngestProgressLock = new Object(); + @ThreadConfined(type = ThreadConfined.ThreadType.AWT) private final List filesInProgress = new ArrayList<>(); - private long estimatedFilesToProcess; - private long processedFiles; + private volatile long estimatedFilesToProcess; + private volatile long processedFiles; + @ThreadConfined(type = ThreadConfined.ThreadType.AWT) private ProgressHandle fileIngestProgressBar; - private final Object artifactIngestProgressLock = new Object(); + @ThreadConfined(type = ThreadConfined.ThreadType.AWT) private ProgressHandle artifactIngestProgressBar; /* @@ -534,7 +541,7 @@ final class IngestJobExecutor { } /** - * Determnines which inges job stage to start in and starts up the ingest + * Determines which ingest job stage to start in and starts up the ingest * module pipelines. * * @return A collection of ingest module startup errors, empty on success.
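The hunk above swaps lock-based guarding of the progress handles for thread confinement: the handles are touched only on the AWT Event Dispatch Thread, and the counters read across threads become volatile. A minimal sketch of that pattern; the class and field names are illustrative, not from the patch.

```java
import javax.swing.SwingUtilities;
import org.netbeans.api.progress.ProgressHandle;

class EdtConfinedProgress {

    private ProgressHandle handle;       // confined to the EDT: touched only in invokeLater
    private volatile long processed;     // read by the EDT, written by a worker thread

    void start(String displayName, int totalWorkUnits) {
        SwingUtilities.invokeLater(() -> {
            handle = ProgressHandle.createHandle(displayName);
            handle.start();
            handle.switchToDeterminate(totalWorkUnits);
        });
    }

    void fileDone(String fileName) {
        processed++; // assumes a single writer thread; volatile alone is not atomic
        final long snapshot = processed; // capture before hopping threads
        SwingUtilities.invokeLater(() -> {
            if (handle != null) {
                handle.progress(fileName, (int) snapshot);
            }
        });
    }
}
```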
@@ -664,41 +671,39 @@ */ private void startBatchModeAnalysis() { synchronized (stageTransitionLock) { - logInfoMessage(String.format("Starting analysis in batch mode for %s (objID=%d, jobID=%d)", dataSource.getName(), dataSource.getId(), ingestJob.getId())); //NON-NLS + logInfoMessage("Starting ingest job in batch mode"); //NON-NLS stage = IngestJobStage.FILE_AND_HIGH_PRIORITY_DATA_SRC_LEVEL_ANALYSIS; if (hasFileIngestModules()) { /* - * Do a count of the files the data source processor has added - * to the case database. This number will be used to estimate - * how many files remain to be analyzed as each file ingest task - * is completed. + * Do an estimate of the total number of files to be analyzed. + * This number will be used to estimate how many files remain + * to be analyzed as each file ingest task is completed. The + * numbers are estimates because file analysis can add carved + * files and/or derived files. */ - long filesToProcess; if (files.isEmpty()) { - filesToProcess = dataSource.accept(new GetFilesCountVisitor()); + /* + * Do a count of the files the data source processor has + * added to the case database. + */ + estimatedFilesToProcess = dataSource.accept(new GetFilesCountVisitor()); } else { - filesToProcess = files.size(); - } - synchronized (fileIngestProgressLock) { - estimatedFilesToProcess = filesToProcess; + /* + * Use the number of files in the specified subset of all of + * the files for the data source. + */ + estimatedFilesToProcess = files.size(); } + startFileIngestProgressBar(); } - if (usingNetBeansGUI) { - /* - * Start ingest progress bars in the lower right hand corner of - * the main application window. - */ - if (hasFileIngestModules()) { - startFileIngestProgressBar(); - } - if (hasHighPriorityDataSourceIngestModules()) { - startDataSourceIngestProgressBar(); - } - if (hasDataArtifactIngestModules()) { - startArtifactIngestProgressBar(); - } + if (hasHighPriorityDataSourceIngestModules()) { + startDataSourceIngestProgressBar(); + } + + if (hasDataArtifactIngestModules()) { + startArtifactIngestProgressBar(); } /* @@ -708,60 +713,75 @@ currentDataSourceIngestPipeline = highPriorityDataSourceIngestPipeline; /* - * Schedule ingest tasks and then immediately check for stage - * completion. This is necessary because it is possible that zero - * tasks will actually make it to task execution due to the file - * filter or other ingest job settings. In that case, there will - * never be a stage completion check in an ingest thread executing - * an ingest task, so such a job would run forever without a check - * here. + * Schedule ingest tasks. If only analyzing a subset of the files in + * the data source, the current assumption is that only file ingest + * tasks for those files need to be scheduled. Data artifact ingest + * tasks will be scheduled as data artifacts produced by the file + * analysis are posted to the blackboard. */ if (!files.isEmpty() && hasFileIngestModules()) { taskScheduler.scheduleFileIngestTasks(this, files); } else if (hasHighPriorityDataSourceIngestModules() || hasFileIngestModules() || hasDataArtifactIngestModules()) { taskScheduler.scheduleIngestTasks(this); } + + /* + * Check for stage completion. This is necessary because it is + * possible that none of the tasks that were just scheduled will + * actually make it to task execution, due to the file filter or + * other ingest job settings.
If that happens, there will never be + * another stage completion check for this job in an ingest thread + * executing an ingest task, so such a job would run forever without + * a check here. + */ checkForStageCompleted(); } } /** * Starts analysis for a streaming mode ingest job. For a streaming mode - * job, the data source processor streams files in as it adds them to the - * case database and file analysis can begin before data source level - * analysis. + * job, a data source processor streams files to this ingest job executor as + * it adds the files to the case database, and file level analysis can begin + * before data source level analysis. */ private void startStreamingModeAnalysis() { synchronized (stageTransitionLock) { - logInfoMessage("Starting data source level analysis in streaming mode"); //NON-NLS + logInfoMessage("Starting ingest job in streaming mode"); //NON-NLS stage = IngestJobStage.STREAMED_FILE_ANALYSIS_ONLY; - if (usingNetBeansGUI) { + if (hasFileIngestModules()) { /* - * Start ingest progress bars in the lower right hand corner of - * the main application window. + * Start the file ingest progress bar, but do not schedule any + * file or data source ingest tasks. File ingest tasks will + * instead be scheduled as files are streamed in via + * addStreamedFiles(), and a data source ingest task will be + * scheduled later, via addStreamedDataSource(). + * + * Note that because estimated files remaining to process still + * has its initial value of zero, the file ingest progress bar + * will start in the "indeterminate" state. A rough estimate of + * the files to be processed will be computed later, when all of + * the files have been added to the case database, as signaled + * by a call to addStreamedDataSource(). */ - if (hasFileIngestModules()) { - /* - * Note that because estimated files remaining to process - * still has its initial value of zero, the progress bar - * will start in the "indeterminate" state. An estimate of - * the files to process can be computed later, when all of - * the files have been added ot the case database. - */ - startFileIngestProgressBar(); - } - if (hasDataArtifactIngestModules()) { - startArtifactIngestProgressBar(); - } + estimatedFilesToProcess = 0; + startFileIngestProgressBar(); } if (hasDataArtifactIngestModules()) { + startArtifactIngestProgressBar(); + /* * Schedule artifact ingest tasks for any artifacts currently in * the case database. This needs to be done before any files or * the data source are streamed in to avoid analyzing the data * artifacts added to the case database by those tasks twice. + * This constraint is implemented by restricting construction of + * a streaming mode IngestJob to + * IngestManager.openIngestStream(), which constructs and starts + * the job before returning the IngestStream. This means that + * the code in this method will run before addStreamedFiles() or + * addStreamedDataSource() can be called via the IngestStream. */ taskScheduler.scheduleDataArtifactIngestTasks(this); } @@ -773,7 +793,7 @@ * case database and streamed in, and the data source is now ready for * analysis.
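The progress-bar lifecycle described above, reduced to its essentials: start indeterminate while the file count is unknown, then switch to determinate once the estimate arrives. A sketch using the NetBeans ProgressHandle API, with illustrative names.

```java
import org.netbeans.api.progress.ProgressHandle;

// Illustrative sketch of the indeterminate-to-determinate hand-off.
class StreamingProgressDemo {

    private final ProgressHandle bar = ProgressHandle.createHandle("File Ingest");

    void begin() {
        bar.start();
        bar.switchToIndeterminate(); // the file count is still unknown (estimate is 0)
    }

    void allFilesStreamed(long estimatedFiles) {
        // Called once the data source is added and the estimate is known.
        bar.switchToDeterminate((int) estimatedFiles);
    }
}
```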
*/ - void startStreamingModeDataSourceAnalysis() { + void addStreamedDataSource() { synchronized (stageTransitionLock) { logInfoMessage("Starting full first stage analysis in streaming mode"); //NON-NLS stage = IngestJobExecutor.IngestJobStage.FILE_AND_HIGH_PRIORITY_DATA_SRC_LEVEL_ANALYSIS; @@ -781,43 +801,36 @@ final class IngestJobExecutor { if (hasFileIngestModules()) { /* - * Do a count of the files the data source processor has added - * to the case database. This number will be used to estimate - * how many files remain to be analyzed as each file ingest task - * is completed. + * For ingest job progress reporting purposes, do a count of the + * files the data source processor has added to the case + * database. */ - long filesToProcess = dataSource.accept(new GetFilesCountVisitor()); - synchronized (fileIngestProgressLock) { - estimatedFilesToProcess = filesToProcess; - if (usingNetBeansGUI && fileIngestProgressBar != null) { - fileIngestProgressBar.switchToDeterminate((int) estimatedFilesToProcess); - } - } + estimatedFilesToProcess = dataSource.accept(new GetFilesCountVisitor()); + switchFileIngestProgressBarToDeterminate(); } - if (usingNetBeansGUI) { + currentDataSourceIngestPipeline = highPriorityDataSourceIngestPipeline; + if (hasHighPriorityDataSourceIngestModules()) { /* * Start a data source level ingest progress bar in the lower * right hand corner of the main application window. The file * and data artifact ingest progress bars were already started * in startStreamingModeAnalysis(). */ - if (hasHighPriorityDataSourceIngestModules()) { - startDataSourceIngestProgressBar(); - } - } + startDataSourceIngestProgressBar(); - currentDataSourceIngestPipeline = highPriorityDataSourceIngestPipeline; - if (hasHighPriorityDataSourceIngestModules()) { + /* + * Schedule a task for the data source. + */ IngestJobExecutor.taskScheduler.scheduleDataSourceIngestTask(this); } else { /* - * If no data source level ingest task is scheduled at this time - * and all of the file level and artifact ingest tasks scheduled - * during the initial file streaming stage have already - * executed, there will never be a stage completion check in an - * ingest thread executing an ingest task, so such a job would - * run forever without a check here. + * If no data source level ingest task is scheduled at this + * time, and all of the file level and artifact ingest tasks + * scheduled during the initial file streaming stage have + * already been executed, there will never be a stage completion + * check in an ingest thread executing an ingest task for this + * job, so such a job would run forever without a check here. 
*/ checkForStageCompleted(); } @@ -830,13 +843,9 @@ final class IngestJobExecutor { private void startLowPriorityDataSourceAnalysis() { synchronized (stageTransitionLock) { if (hasLowPriorityDataSourceIngestModules()) { - logInfoMessage(String.format("Starting low priority data source analysis for %s (objID=%d, jobID=%d)", dataSource.getName(), dataSource.getId(), ingestJob.getId())); //NON-NLS + logInfoMessage("Starting low priority data source analysis"); //NON-NLS stage = IngestJobExecutor.IngestJobStage.LOW_PRIORITY_DATA_SRC_LEVEL_ANALYSIS; - - if (usingNetBeansGUI) { - startDataSourceIngestProgressBar(); - } - + startDataSourceIngestProgressBar(); currentDataSourceIngestPipeline = lowPriorityDataSourceIngestPipeline; taskScheduler.scheduleDataSourceIngestTask(this); } @@ -844,40 +853,42 @@ final class IngestJobExecutor { } /** - * Starts a data artifacts analysis NetBeans progress bar in the lower right - * hand corner of the main application window. The progress bar provides the - * user with a task cancellation button. Pressing it cancels the ingest job. - * Analysis already completed at the time that cancellation occurs is NOT - * discarded. + * Starts a NetBeans progress bar for data artifacts analysis in the lower + * right hand corner of the main application window. The progress bar + * provides the user with a task cancellation button. Pressing it cancels + * the ingest job. Analysis already completed at the time that cancellation + * occurs is NOT discarded. */ private void startArtifactIngestProgressBar() { if (usingNetBeansGUI) { - synchronized (artifactIngestProgressLock) { + SwingUtilities.invokeLater(() -> { String displayName = NbBundle.getMessage(this.getClass(), "IngestJob.progress.dataArtifactIngest.displayName", this.dataSource.getName()); artifactIngestProgressBar = ProgressHandle.createHandle(displayName, new Cancellable() { @Override public boolean cancel() { - IngestJobExecutor.this.cancel(IngestJob.CancellationReason.USER_CANCELLED); + new Thread(() -> { + IngestJobExecutor.this.cancel(IngestJob.CancellationReason.USER_CANCELLED); + }).start(); return true; } }); artifactIngestProgressBar.start(); artifactIngestProgressBar.switchToIndeterminate(); - } + }); } } /** - * Starts a data source level analysis NetBeans progress bar in the lower - * right hand corner of the main application window. The progress bar + * Starts a NetBeans progress bar for data source level analysis in the + * lower right hand corner of the main application window. The progress bar * provides the user with a task cancellation button. Pressing it cancels - * either the currently running data source level ingest module or the + * either the currently running data source level ingest module, or the * entire ingest job. Analysis already completed at the time that * cancellation occurs is NOT discarded. 
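Note the pattern introduced throughout these progress-bar hunks: the Cancellable callback fires on the EDT when the user presses the cancel button, so the potentially slow cancellation work is pushed onto a fresh worker thread. A minimal sketch; names not shown in the patch are illustrative.

```java
import org.netbeans.api.progress.ProgressHandle;
import org.openide.util.Cancellable;

class CancellableProgressDemo {

    ProgressHandle create(String displayName, Runnable cancelJob) {
        return ProgressHandle.createHandle(displayName, new Cancellable() {
            @Override
            public boolean cancel() {
                // Runs on the EDT when the user presses the cancel button;
                // do the real work elsewhere so the UI stays responsive.
                new Thread(cancelJob).start();
                return true;
            }
        });
    }
}
```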
*/ private void startDataSourceIngestProgressBar() { if (usingNetBeansGUI) { - synchronized (dataSourceIngestProgressLock) { + SwingUtilities.invokeLater(() -> { String displayName = NbBundle.getMessage(this.getClass(), "IngestJob.progress.dataSourceIngest.initialDisplayName", dataSource.getName()); dataSourceIngestProgressBar = ProgressHandle.createHandle(displayName, new Cancellable() { @Override @@ -894,21 +905,25 @@ final class IngestJobExecutor { String dialogTitle = NbBundle.getMessage(IngestJobExecutor.this.getClass(), "IngestJob.cancellationDialog.title"); JOptionPane.showConfirmDialog(WindowManager.getDefault().getMainWindow(), panel, dialogTitle, JOptionPane.OK_OPTION, JOptionPane.PLAIN_MESSAGE); if (panel.cancelAllDataSourceIngestModules()) { - IngestJobExecutor.this.cancel(IngestJob.CancellationReason.USER_CANCELLED); + new Thread(() -> { + IngestJobExecutor.this.cancel(IngestJob.CancellationReason.USER_CANCELLED); + }).start(); } else { - IngestJobExecutor.this.cancelCurrentDataSourceIngestModule(); + new Thread(() -> { + IngestJobExecutor.this.cancelCurrentDataSourceIngestModule(); + }).start(); } return true; } }); dataSourceIngestProgressBar.start(); dataSourceIngestProgressBar.switchToIndeterminate(); - } + }); } } /** - * Starts a file analysis NetBeans progress bar in the lower right hand + * Starts a NetBeans progress bar for file analysis in the lower right hand * corner of the main application window. The progress bar provides the user * with a task cancellation button. Pressing it cancels the ingest job. * Analysis already completed at the time that cancellation occurs is NOT @@ -916,18 +931,63 @@ final class IngestJobExecutor { */ private void startFileIngestProgressBar() { if (usingNetBeansGUI) { - synchronized (fileIngestProgressLock) { + SwingUtilities.invokeLater(() -> { String displayName = NbBundle.getMessage(getClass(), "IngestJob.progress.fileIngest.displayName", dataSource.getName()); fileIngestProgressBar = ProgressHandle.createHandle(displayName, new Cancellable() { @Override public boolean cancel() { - IngestJobExecutor.this.cancel(IngestJob.CancellationReason.USER_CANCELLED); + new Thread(() -> { + IngestJobExecutor.this.cancel(IngestJob.CancellationReason.USER_CANCELLED); + }).start(); return true; } }); fileIngestProgressBar.start(); - fileIngestProgressBar.switchToDeterminate((int) this.estimatedFilesToProcess); - } + fileIngestProgressBar.switchToDeterminate((int) estimatedFilesToProcess); + }); + } + } + + /** + * Finishes the first stage progress bars. + */ + private void finishFirstStageProgressBars() { + if (usingNetBeansGUI) { + SwingUtilities.invokeLater(() -> { + if (dataSourceIngestProgressBar != null) { + dataSourceIngestProgressBar.finish(); + dataSourceIngestProgressBar = null; + } + + if (fileIngestProgressBar != null) { + fileIngestProgressBar.finish(); + fileIngestProgressBar = null; + } + }); + } + } + + /** + * Finishes all current progress bars. 
+ */ + private void finishAllProgressBars() { + if (usingNetBeansGUI) { + SwingUtilities.invokeLater(() -> { + if (dataSourceIngestProgressBar != null) { + dataSourceIngestProgressBar.finish(); + dataSourceIngestProgressBar = null; + } + + if (fileIngestProgressBar != null) { + fileIngestProgressBar.finish(); + fileIngestProgressBar = null; + } + + if (artifactIngestProgressBar != null) { + artifactIngestProgressBar.finish(); + artifactIngestProgressBar = null; + } + }); } } @@ -968,21 +1028,7 @@ final class IngestJobExecutor { shutDownIngestModulePipeline(pipeline); } - if (usingNetBeansGUI) { - synchronized (dataSourceIngestProgressLock) { - if (dataSourceIngestProgressBar != null) { - dataSourceIngestProgressBar.finish(); - dataSourceIngestProgressBar = null; - } - } - - synchronized (fileIngestProgressLock) { - if (fileIngestProgressBar != null) { - fileIngestProgressBar.finish(); - fileIngestProgressBar = null; - } - } - } + finishFirstStageProgressBars(); if (!jobCancelled && hasLowPriorityDataSourceIngestModules()) { startLowPriorityDataSourceAnalysis(); @@ -993,7 +1039,8 @@ final class IngestJobExecutor { } /** - * Shuts down the ingest module pipelines and progress bars. + * Shuts down the ingest module pipelines and ingest job progress + * indicators. */ private void shutDown() { synchronized (stageTransitionLock) { @@ -1002,29 +1049,7 @@ final class IngestJobExecutor { shutDownIngestModulePipeline(currentDataSourceIngestPipeline); shutDownIngestModulePipeline(artifactIngestPipeline); - - if (usingNetBeansGUI) { - synchronized (dataSourceIngestProgressLock) { - if (dataSourceIngestProgressBar != null) { - dataSourceIngestProgressBar.finish(); - dataSourceIngestProgressBar = null; - } - } - - synchronized (fileIngestProgressLock) { - if (fileIngestProgressBar != null) { - fileIngestProgressBar.finish(); - fileIngestProgressBar = null; - } - } - - synchronized (artifactIngestProgressLock) { - if (artifactIngestProgressBar != null) { - artifactIngestProgressBar.finish(); - artifactIngestProgressBar = null; - } - } - } + finishAllProgressBars(); if (ingestJobInfo != null) { if (jobCancelled) { @@ -1100,7 +1125,7 @@ final class IngestJobExecutor { if (!pipeline.isEmpty()) { /* * Get the file from the task. If the file was "streamed," - * the task may only have the file object ID and a trip to + * the task may only have the file object ID, and a trip to * the case database will be required. */ AbstractFile file; @@ -1114,46 +1139,24 @@ final class IngestJobExecutor { return; } - synchronized (fileIngestProgressLock) { - ++processedFiles; - if (usingNetBeansGUI) { - if (processedFiles <= estimatedFilesToProcess) { - fileIngestProgressBar.progress(file.getName(), (int) processedFiles); - } else { - fileIngestProgressBar.progress(file.getName(), (int) estimatedFilesToProcess); - } - filesInProgress.add(file.getName()); - } - } - /** - * Run the file through the modules in the pipeline. + * Run the file through the modules in the file ingest + * pipeline. */ + final String fileName = file.getName(); + processedFiles++; + updateFileIngestProgressForFileTaskStarted(fileName); List errors = new ArrayList<>(); errors.addAll(pipeline.performTask(task)); if (!errors.isEmpty()) { logIngestModuleErrors(errors, file); } - - if (usingNetBeansGUI && !jobCancelled) { - synchronized (fileIngestProgressLock) { - /** - * Update the file ingest progress bar again, in - * case the file was being displayed. 
- */ - filesInProgress.remove(file.getName()); - if (filesInProgress.size() > 0) { - fileIngestProgressBar.progress(filesInProgress.get(0)); - } else { - fileIngestProgressBar.progress(""); - } - } - } + updateFileProgressBarForFileTaskCompleted(fileName); } fileIngestPipelinesQueue.put(pipeline); } } catch (InterruptedException ex) { - logger.log(Level.SEVERE, String.format("Unexpected interrupt of file ingest thread during execution of file ingest job (file obj ID = %d)", task.getFileId()), ex); + logger.log(Level.SEVERE, String.format("Unexpected interrupt of file ingest thread during execution of file ingest job (file object ID = %d, thread ID = %d)", task.getFileId(), task.getThreadId()), ex); Thread.currentThread().interrupt(); } finally { taskScheduler.notifyTaskCompleted(task); @@ -1248,100 +1251,196 @@ final class IngestJobExecutor { /** * Updates the display name shown on the current data source level ingest - * progress bar for this job. + * progress bar for this job, if the job has not been cancelled. * * @param displayName The new display name. */ void updateDataSourceIngestProgressBarDisplayName(String displayName) { if (usingNetBeansGUI && !jobCancelled) { - synchronized (dataSourceIngestProgressLock) { + SwingUtilities.invokeLater(() -> { if (dataSourceIngestProgressBar != null) { dataSourceIngestProgressBar.setDisplayName(displayName); } - } + }); } } /** * Switches the current data source level ingest progress bar to determinate - * mode. This should be called if the total work units to process the data - * source is known. + * mode, if the job has not been cancelled. This should be called if the + * total work units to process the data source is known. * * @param workUnits Total number of work units for the processing of the * data source. */ void switchDataSourceIngestProgressBarToDeterminate(int workUnits) { if (usingNetBeansGUI && !jobCancelled) { - synchronized (dataSourceIngestProgressLock) { + SwingUtilities.invokeLater(() -> { if (dataSourceIngestProgressBar != null) { dataSourceIngestProgressBar.switchToDeterminate(workUnits); } - } + }); } } /** * Switches the current data source level ingest progress bar to - * indeterminate mode. This should be called if the total work units to - * process the data source is unknown. + * indeterminate mode, if the job has not been cancelled. This should be + * called if the total work units to process the data source is unknown. */ void switchDataSourceIngestProgressBarToIndeterminate() { if (usingNetBeansGUI && !jobCancelled) { - synchronized (dataSourceIngestProgressLock) { + SwingUtilities.invokeLater(() -> { if (dataSourceIngestProgressBar != null) { dataSourceIngestProgressBar.switchToIndeterminate(); } - } + }); } } /** * Updates the current data source level ingest progress bar with the number - * of work units performed, if in the determinate mode. + * of work units performed, if in the determinate mode, and the job has not + * been cancelled. * * @param workUnits Number of work units performed. */ void advanceDataSourceIngestProgressBar(int workUnits) { if (usingNetBeansGUI && !jobCancelled) { - synchronized (dataSourceIngestProgressLock) { + SwingUtilities.invokeLater(() -> { if (dataSourceIngestProgressBar != null) { dataSourceIngestProgressBar.progress("", workUnits); } - } + }); } } /** * Updates the current data source level ingest progress bar with a new task - * name, where the task name is the "subtitle" under the display name. 
+ * name, where the task name is the "subtitle" under the display name, if + * the job has not been cancelled. * * @param currentTask The task name. */ void advanceDataSourceIngestProgressBar(String currentTask) { if (usingNetBeansGUI && !jobCancelled) { - synchronized (dataSourceIngestProgressLock) { + SwingUtilities.invokeLater(() -> { if (dataSourceIngestProgressBar != null) { dataSourceIngestProgressBar.progress(currentTask); } - } + }); } } /** * Updates the current data source level ingest progress bar with a new task - * name and the number of work units performed, if in the determinate mode. - * The task name is the "subtitle" under the display name. + * name and the number of work units performed, if in the determinate mode, + * and the job has not been cancelled. The task name is the "subtitle" under + * the display name. * * @param currentTask The task name. * @param workUnits Number of work units performed. */ void advanceDataSourceIngestProgressBar(String currentTask, int workUnits) { if (usingNetBeansGUI && !jobCancelled) { - synchronized (dataSourceIngestProgressLock) { + SwingUtilities.invokeLater(() -> { if (dataSourceIngestProgressBar != null) { dataSourceIngestProgressBar.progress(currentTask, workUnits); } - } + }); + } + } + + /** + * Switches the file ingest progress bar to determinate mode, using the + * estimated number of files to process as the number of work units. + */ + private void switchFileIngestProgressBarToDeterminate() { + if (usingNetBeansGUI) { + SwingUtilities.invokeLater(() -> { + if (fileIngestProgressBar != null) { + fileIngestProgressBar.switchToDeterminate((int) estimatedFilesToProcess); + } + }); + } + } + + /** + * Updates the current file ingest progress bar upon start of analysis of a + * file, if the job has not been cancelled. + * + * @param fileName The name of the file. + */ + private void updateFileIngestProgressForFileTaskStarted(String fileName) { + if (usingNetBeansGUI && !jobCancelled) { + SwingUtilities.invokeLater(() -> { + /* + * If processedFiles exceeds estimatedFilesToProcess, i.e., the + * max work units set for the progress bar, the progress bar + * will go into an infinite loop throwing + * IllegalArgumentExceptions in the EDT (NetBeans bug). Also, a + * check-then-act race condition needs to be avoided here. This + * can be done without guarding processedFiles and + * estimatedFilesToProcess with the same lock because + * estimatedFilesToProcess does not change after it is used to + * switch the progress bar to determinate mode. + */ + long processedFilesCapture = processedFiles; + if (processedFilesCapture <= estimatedFilesToProcess) { + fileIngestProgressBar.progress(fileName, (int) processedFilesCapture); + } else { + fileIngestProgressBar.progress(fileName, (int) estimatedFilesToProcess); + } + filesInProgress.add(fileName); + }); + } + } + + /** + * Updates the current file ingest progress bar upon completion of analysis + * of a file, if the job has not been cancelled. + * + * @param fileName The name of the file. + */ + private void updateFileProgressBarForFileTaskCompleted(String fileName) { + if (usingNetBeansGUI && !jobCancelled) { + SwingUtilities.invokeLater(() -> { + filesInProgress.remove(fileName); + /* + * Display the name of another file in progress, or the empty + * string if there are none. 
+ */ + if (filesInProgress.size() > 0) { + fileIngestProgressBar.progress(filesInProgress.get(0)); + } else { + fileIngestProgressBar.progress(""); // NON-NLS + } + }); + } + } + + /** + * Displays a "cancelling" message on all of the current ingest + * progress bars. + */ + private void displayCancellingProgressMessage() { + if (usingNetBeansGUI) { + SwingUtilities.invokeLater(() -> { + if (dataSourceIngestProgressBar != null) { + dataSourceIngestProgressBar.setDisplayName(NbBundle.getMessage(getClass(), "IngestJob.progress.dataSourceIngest.initialDisplayName", dataSource.getName())); + dataSourceIngestProgressBar.progress(NbBundle.getMessage(getClass(), "IngestJob.progress.cancelling")); + } + + if (fileIngestProgressBar != null) { + fileIngestProgressBar.setDisplayName(NbBundle.getMessage(getClass(), "IngestJob.progress.fileIngest.displayName", dataSource.getName())); + fileIngestProgressBar.progress(NbBundle.getMessage(getClass(), "IngestJob.progress.cancelling")); + } + + if (artifactIngestProgressBar != null) { + artifactIngestProgressBar.setDisplayName(NbBundle.getMessage(getClass(), "IngestJob.progress.dataArtifactIngest.displayName", dataSource.getName())); + artifactIngestProgressBar.progress(NbBundle.getMessage(getClass(), "IngestJob.progress.cancelling")); + } + }); } } @@ -1358,27 +1457,28 @@ /** * Rescinds a temporary cancellation of data source level ingest that was - * used to stop a single data source level ingest module for this job. + * used to stop a single data source level ingest module for this job. The + * data source ingest progress bar is reset, if the job has not been + * cancelled. * * @param moduleDisplayName The display name of the module that was stopped. */ void currentDataSourceIngestModuleCancellationCompleted(String moduleDisplayName) { currentDataSourceIngestModuleCancelled = false; cancelledDataSourceIngestModules.add(moduleDisplayName); - - if (usingNetBeansGUI) { - /** - * A new progress bar must be created because the cancel button of - * the previously constructed component is disabled by NetBeans when - * the user selects the "OK" button of the cancellation confirmation - * dialog popped up by NetBeans when the progress bar cancel button - * is pressed. - */ - synchronized (dataSourceIngestProgressLock) { + if (usingNetBeansGUI && !jobCancelled) { + SwingUtilities.invokeLater(() -> { + /** + * A new progress bar must be created because the cancel button + * of the previously constructed component is disabled by + * NetBeans when the user selects the "OK" button of the + * cancellation confirmation dialog popped up by NetBeans when + * the progress bar cancel button is pressed. + */ dataSourceIngestProgressBar.finish(); dataSourceIngestProgressBar = null; startDataSourceIngestProgressBar(); - } + }); } } @@ -1404,32 +1504,20 @@ } /** - * Requests cancellation of ingest, i.e., a shutdown of the data source - * level and file level ingest pipelines. + * Requests cancellation of the ingest job. All pending ingest tasks for the + * job will be cancelled, but any tasks already in progress in ingest + * threads will run to completion. This could take a while if the ingest + * modules executing the tasks are not checking the ingest job cancellation + * flag via the ingest job context. * * @param reason The cancellation reason.
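The cancellation javadoc above relies on modules polling the job's cancellation flag between units of work. A sketch of a cooperating data source module; to the best of my knowledge IngestJobContext.dataSourceIngestIsCancelled() is the existing Autopsy API for this flag, and the work loop itself is illustrative.

```java
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.datamodel.Content;

// Illustrative cooperating module: polls the cancellation flag between units of work.
public class PollingDataSourceModule implements DataSourceIngestModule {

    private IngestJobContext context;
    private static final int TOTAL_UNITS = 100; // stand-in for real work sizing

    @Override
    public void startUp(IngestJobContext context) {
        this.context = context;
    }

    @Override
    public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
        progressBar.switchToDeterminate(TOTAL_UNITS);
        for (int unit = 0; unit < TOTAL_UNITS; unit++) {
            if (context.dataSourceIngestIsCancelled()) {
                return ProcessResult.OK; // stop promptly; completed analysis is kept
            }
            // ... one unit of analysis work would go here ...
            progressBar.progress(unit + 1);
        }
        return ProcessResult.OK;
    }
}
```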
*/ void cancel(IngestJob.CancellationReason reason) { jobCancelled = true; cancellationReason = reason; + displayCancellingProgressMessage(); IngestJobExecutor.taskScheduler.cancelPendingFileTasksForIngestJob(this); - if (usingNetBeansGUI) { - synchronized (dataSourceIngestProgressLock) { - if (dataSourceIngestProgressBar != null) { - dataSourceIngestProgressBar.setDisplayName(NbBundle.getMessage(getClass(), "IngestJob.progress.dataSourceIngest.initialDisplayName", dataSource.getName())); - dataSourceIngestProgressBar.progress(NbBundle.getMessage(getClass(), "IngestJob.progress.cancelling")); - } - } - - synchronized (this.fileIngestProgressLock) { - if (null != this.fileIngestProgressBar) { - this.fileIngestProgressBar.setDisplayName(NbBundle.getMessage(getClass(), "IngestJob.progress.fileIngest.displayName", dataSource.getName())); - this.fileIngestProgressBar.progress(NbBundle.getMessage(getClass(), "IngestJob.progress.cancelling")); - } - } - } - synchronized (threadRegistrationLock) { for (Thread thread : pausedIngestThreads) { thread.interrupt(); @@ -1437,15 +1525,13 @@ final class IngestJobExecutor { pausedIngestThreads.clear(); } - /* - * If a data source had no tasks in progress it may now be complete. - */ checkForStageCompleted(); } /** - * Queries whether or not cancellation, i.e., a shut down of the data source - * level and file level ingest pipelines for this job, has been requested. + * Queries whether or not cancellation of the ingest job has been requested. + * Ingest modules executing ingest tasks for this job should check this flag + * frequently via the ingest job context. * * @return True or false. */ @@ -1454,9 +1540,9 @@ final class IngestJobExecutor { } /** - * Gets the reason this job was cancelled. + * If the ingest job was cancelled, gets the reason this job was cancelled. * - * @return The cancellation reason, may be not cancelled. + * @return The cancellation reason, may be "not cancelled." */ IngestJob.CancellationReason getCancellationReason() { return cancellationReason; @@ -1469,7 +1555,7 @@ final class IngestJobExecutor { * @param message The message. */ private void logInfoMessage(String message) { - logger.log(Level.INFO, String.format("%s (data source = %s, object Id = %d, job id = %d)", message, dataSource.getName(), dataSource.getId(), getIngestJobId())); //NON-NLS + logger.log(Level.INFO, String.format("%s (data source = %s, data source object Id = %d, job id = %d)", message, dataSource.getName(), dataSource.getId(), getIngestJobId())); //NON-NLS } /** @@ -1481,7 +1567,7 @@ final class IngestJobExecutor { * @param throwable The throwable associated with the error. */ private void logErrorMessage(Level level, String message, Throwable throwable) { - logger.log(level, String.format("%s (data source = %s, object Id = %d, ingest job id = %d)", message, dataSource.getName(), dataSource.getId(), getIngestJobId()), throwable); //NON-NLS + logger.log(level, String.format("%s (data source = %s, data source object Id = %d, ingest job id = %d)", message, dataSource.getName(), dataSource.getId(), getIngestJobId()), throwable); //NON-NLS } /** @@ -1492,7 +1578,7 @@ final class IngestJobExecutor { * @param message The message. 
*/ private void logErrorMessage(Level level, String message) { - logger.log(level, String.format("%s (data source = %s, object Id = %d, ingest job id %d)", message, dataSource.getName(), dataSource.getId(), getIngestJobId())); //NON-NLS + logger.log(level, String.format("%s (data source = %s, data source object Id = %d, ingest job id = %d)", message, dataSource.getName(), dataSource.getId(), getIngestJobId())); //NON-NLS } /** @@ -1514,7 +1600,7 @@ */ private void logIngestModuleErrors(List errors, AbstractFile file) { for (IngestModuleError error : errors) { - logErrorMessage(Level.SEVERE, String.format("%s experienced an error during analysis while processing file %s, object ID %d", error.getModuleDisplayName(), file.getName(), file.getId()), error.getThrowable()); //NON-NLS + logErrorMessage(Level.SEVERE, String.format("%s experienced an error during analysis while processing file %s (object ID = %d)", error.getModuleDisplayName(), file.getName(), file.getId()), error.getThrowable()); //NON-NLS } } @@ -1549,20 +1635,25 @@ long snapShotTime = new Date().getTime(); IngestJobTasksSnapshot tasksSnapshot = null; if (includeIngestTasksSnapshot) { - synchronized (fileIngestProgressLock) { - processedFilesCount = processedFiles; - estimatedFilesToProcessCount = estimatedFilesToProcess; - snapShotTime = new Date().getTime(); - } + processedFilesCount = processedFiles; + estimatedFilesToProcessCount = estimatedFilesToProcess; + snapShotTime = new Date().getTime(); tasksSnapshot = taskScheduler.getTasksSnapshotForJob(getIngestJobId()); } - - return new Snapshot(dataSource.getName(), - getIngestJobId(), createTime, + return new Snapshot( + dataSource.getName(), + getIngestJobId(), + createTime, getCurrentDataSourceIngestModule(), - fileIngestRunning, fileIngestStartTime, - jobCancelled, cancellationReason, cancelledDataSourceIngestModules, - processedFilesCount, estimatedFilesToProcessCount, snapShotTime, tasksSnapshot); + fileIngestRunning, + fileIngestStartTime, + jobCancelled, + cancellationReason, + cancelledDataSourceIngestModules, + processedFilesCount, + estimatedFilesToProcessCount, + snapShotTime, + tasksSnapshot); } /** diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobInputStream.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobInputStream.java index 2d00727858..fe43bb12b3 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobInputStream.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobInputStream.java @@ -67,7 +67,7 @@ class IngestJobInputStream implements IngestStream { @Override public synchronized void close() { closed = true; - ingestJob.processStreamingIngestDataSource(); + ingestJob.addStreamedDataSource(); } @Override diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java index 2c87487232..0dc2597481 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java @@ -1050,7 +1050,7 @@ public class IngestManager implements IngestProgressSnapshotProvider { } /** - * Creates and starts an ingest job for a collection of data sources. + * Creates and starts an ingest job. 
*/ private final class StartIngestJobTask implements Callable { diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModule.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModule.java index ec83a129c1..6f53ad0f52 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestModule.java @@ -60,6 +60,11 @@ public interface IngestModule { * must also be taken into consideration when sharing resources between * module instances. See IngestModuleReferenceCounter. * + * IMPORTANT: IngestModuleException messages thrown during start up are + * displayed to the user, if a user is present. Therefore, an exception to + * the policy that exception messages are not localized is appropriate for + * this method. Also, the exception messages should be user-friendly. + * * @param context Provides data and services specific to the ingest job and * the ingest pipeline of which the module is a part. * diff --git a/Core/src/org/sleuthkit/autopsy/ingest/runIngestModuleWizard/RunIngestModulesAction.java b/Core/src/org/sleuthkit/autopsy/ingest/runIngestModuleWizard/RunIngestModulesAction.java index 4698b68006..a6721e0ab2 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/runIngestModuleWizard/RunIngestModulesAction.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/runIngestModuleWizard/RunIngestModulesAction.java @@ -49,7 +49,7 @@ public final class RunIngestModulesAction extends AbstractAction { @Messages("RunIngestModulesAction.name=Run Ingest Modules") private static final long serialVersionUID = 1L; - private static final Logger logger = Logger.getLogger(SpecialDirectoryNode.class.getName()); + private static final Logger logger = Logger.getLogger(RunIngestModulesAction.class.getName()); /* * Note that the execution context is the name of the dialog that used to be diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java new file mode 100644 index 0000000000..4ed213ced3 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java @@ -0,0 +1,66 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel; + +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; +import java.beans.PropertyChangeEvent; +import java.util.Set; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; + +/** + * Internal methods that DAOs implement. + */ +abstract class AbstractDAO { + + /** + * Clears any cached data (due to a change in view). + */ + abstract void clearCaches(); + + /** + * Handles an Autopsy event (e.g., ingest, case, etc.). This method is + * responsible for clearing internal caches that are affected by the event + * and returning one or more DAOEvents that should be broadcast to the + * views. + * + * @param evt The Autopsy event that recently came in from Ingest/Case. 
+ * + * @return The list of DAOEvents that should be broadcast to the views or + * an empty list if the Autopsy events are irrelevant to this DAO. + */ + abstract Set processEvent(PropertyChangeEvent evt); + + /** + * Handles the ingest complete or cancelled event. Any events that are + * delayed or batched are flushed and returned. + * + * @return The flushed events that were delayed and batched. + */ + abstract Set handleIngestComplete(); + + /** + * Returns any categories that require a tree refresh. For instance, if web + * cache and web bookmarks haven't been updated recently, and are currently + * set to an indeterminate amount (i.e., "..."), then broadcast an event + * forcing the tree to update to a determinate count. + * + * @return The categories that require a tree refresh. + */ + abstract Set shouldRefreshTree(); +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java index af246a9374..8acff81099 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java @@ -18,27 +18,42 @@ */ package org.sleuthkit.autopsy.mainui.datamodel; +import org.sleuthkit.autopsy.mainui.datamodel.events.AnalysisResultSetEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.AnalysisResultEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import java.beans.PropertyChangeEvent; import java.sql.SQLException; import java.text.MessageFormat; import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; import java.util.Comparator; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; +import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.logging.Level; import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.apache.commons.lang3.tuple.Pair; import org.openide.util.NbBundle; import org.openide.util.NbBundle.Messages; +import org.python.google.common.collect.ImmutableSet; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeDisplayCount; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventUtils; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeCounts; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.AnalysisResult; @@ -64,12 +79,6 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { private static Logger logger = Logger.getLogger(AnalysisResultDAO.class.getName()); - // rule of thumb: 10 entries times number of cached SearchParams sub-types (BlackboardArtifactSearchParam, AnalysisResultSetSearchParam, KeywordHitSearchParam) - private static final int CACHE_SIZE = 30; - private static final long CACHE_DURATION = 2; - private static final TimeUnit CACHE_DURATION_UNITS = TimeUnit.MINUTES; - private final Cache, AnalysisResultTableSearchResultsDTO> 
searchParamsCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).expireAfterAccess(CACHE_DURATION, CACHE_DURATION_UNITS).build(); - private static AnalysisResultDAO instance = null; @NbBundle.Messages({ @@ -133,6 +142,21 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { return BlackboardArtifactDAO.getIgnoredTreeTypes(); } + @SuppressWarnings("deprecation") + private static final Set STANDARD_SET_TYPES = ImmutableSet.of( + BlackboardArtifact.Type.TSK_INTERESTING_ITEM.getTypeID(), + BlackboardArtifact.Type.TSK_INTERESTING_ARTIFACT_HIT.getTypeID(), + BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT.getTypeID(), + BlackboardArtifact.Type.TSK_HASHSET_HIT.getTypeID() + ); + + // TODO We can probably combine all the caches at some point + private final Cache, AnalysisResultTableSearchResultsDTO> analysisResultCache = CacheBuilder.newBuilder().maximumSize(1000).build(); + private final Cache, AnalysisResultTableSearchResultsDTO> setHitCache = CacheBuilder.newBuilder().maximumSize(1000).build(); + private final Cache, AnalysisResultTableSearchResultsDTO> keywordHitCache = CacheBuilder.newBuilder().maximumSize(1000).build(); + + private final TreeCounts treeCounts = new TreeCounts<>(); + private AnalysisResultTableSearchResultsDTO fetchAnalysisResultsForTable(SearchParams cacheKey) throws NoCurrentCaseException, TskCoreException { SleuthkitCase skCase = getCase(); @@ -236,7 +260,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { return new AnalysisResultRowDTO((AnalysisResult) artifact, srcContent, isTimelineSupported, cellValues, id); } - public AnalysisResultTableSearchResultsDTO getAnalysisResultsForTable(AnalysisResultSearchParam artifactKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { + public AnalysisResultTableSearchResultsDTO getAnalysisResultsForTable(AnalysisResultSearchParam artifactKey, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException { BlackboardArtifact.Type artType = artifactKey.getArtifactType(); if (artType == null || artType.getCategory() != BlackboardArtifact.Category.ANALYSIS_RESULT @@ -247,18 +271,31 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } SearchParams searchParams = new SearchParams<>(artifactKey, startItem, maxCount); - if (hardRefresh) { - searchParamsCache.invalidate(searchParams); + return analysisResultCache.get(searchParams, () -> fetchAnalysisResultsForTable(searchParams)); + } + + private boolean isAnalysisResultsInvalidating(AnalysisResultSearchParam key, DAOEvent eventData) { + if (!(eventData instanceof AnalysisResultEvent)) { + return false; } - return searchParamsCache.get(searchParams, () -> fetchAnalysisResultsForTable(searchParams)); + AnalysisResultEvent analysisResultEvt = (AnalysisResultEvent) eventData; + return key.getArtifactType().getTypeID() == analysisResultEvt.getArtifactType().getTypeID() + && (key.getDataSourceId() == null || key.getDataSourceId() == analysisResultEvt.getDataSourceId()); } - public boolean isAnalysisResultsInvalidating(AnalysisResultSearchParam key, ModuleDataEvent eventData) { - return key.getArtifactType().equals(eventData.getBlackboardArtifactType()); + private boolean isAnalysisResultsSetInvalidating(AnalysisResultSetSearchParam key, DAOEvent event) { + if (!(event instanceof AnalysisResultSetEvent)) { + return false; + } + + AnalysisResultSetEvent setEvent = (AnalysisResultSetEvent) event; + return isAnalysisResultsInvalidating((AnalysisResultSearchParam) key, 
(AnalysisResultEvent) setEvent) + && Objects.equals(key.getSetName(), setEvent.getSetName()); } - public AnalysisResultTableSearchResultsDTO getAnalysisResultSetHits(AnalysisResultSetSearchParam artifactKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { + // GVDTODO handle keyword hits + public AnalysisResultTableSearchResultsDTO getAnalysisResultSetHits(AnalysisResultSetSearchParam artifactKey, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException { if (artifactKey.getDataSourceId() != null && artifactKey.getDataSourceId() < 0) { throw new IllegalArgumentException(MessageFormat.format("Illegal data. " + "Data source id must be null or > 0. " @@ -266,14 +303,10 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } SearchParams searchParams = new SearchParams<>(artifactKey, startItem, maxCount); - if (hardRefresh) { - searchParamsCache.invalidate(searchParams); - } - - return searchParamsCache.get(searchParams, () -> fetchKeywordHitsForTable(searchParams)); + return setHitCache.get(searchParams, () -> fetchKeywordHitsForTable(searchParams)); } - public AnalysisResultTableSearchResultsDTO getKeywordHitsForTable(KeywordHitSearchParam artifactKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { + public AnalysisResultTableSearchResultsDTO getKeywordHitsForTable(KeywordHitSearchParam artifactKey, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException { if (artifactKey.getDataSourceId() != null && artifactKey.getDataSourceId() < 0) { throw new IllegalArgumentException(MessageFormat.format("Illegal data. " + "Data source id must be null or > 0. " @@ -281,11 +314,19 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } SearchParams searchParams = new SearchParams<>(artifactKey, startItem, maxCount); - if (hardRefresh) { - searchParamsCache.invalidate(searchParams); - } + return keywordHitCache.get(searchParams, () -> fetchKeywordHitsForTable(searchParams)); + } - return searchParamsCache.get(searchParams, () -> fetchKeywordHitsForTable(searchParams)); + public void dropAnalysisResultCache() { + analysisResultCache.invalidateAll(); + } + + public void dropHashHitCache() { + setHitCache.invalidateAll(); + } + + public void dropKeywordHitCache() { + keywordHitCache.invalidateAll(); } /** @@ -304,14 +345,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { // get row dto's sorted by display name Map typeCounts = getCounts(BlackboardArtifact.Category.ANALYSIS_RESULT, dataSourceId); List> treeItemRows = typeCounts.entrySet().stream() - .map(entry -> { - return new TreeResultsDTO.TreeItemDTO<>( - BlackboardArtifact.Category.ANALYSIS_RESULT.name(), - new AnalysisResultSearchParam(entry.getKey(), dataSourceId), - entry.getKey().getTypeID(), - entry.getKey().getDisplayName(), - entry.getValue()); - }) + .map(entry -> getTreeItem(entry.getKey(), dataSourceId, TreeDisplayCount.getDeterminate(entry.getValue()))) .sorted(Comparator.comparing(countRow -> countRow.getDisplayName())) .collect(Collectors.toList()); @@ -323,6 +357,15 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } } + private TreeItemDTO getTreeItem(BlackboardArtifact.Type type, Long dataSourceId, TreeDisplayCount displayCount) { + return new TreeItemDTO<>( + BlackboardArtifact.Category.ANALYSIS_RESULT.name(), + new AnalysisResultSearchParam(type, dataSourceId), + type.getTypeID(), + type.getDisplayName(), + 
displayCount); + } + /** * * @param type The artifact type to filter on. @@ -404,18 +447,28 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { .filter(entry -> nullSetName != null || entry.getKey() != null) .sorted((a, b) -> compareSetStrings(a.getKey(), b.getKey())) .map(entry -> { - return new TreeItemDTO<>( - type.getTypeName(), - new AnalysisResultSetSearchParam(type, dataSourceId, entry.getKey()), - entry.getKey() == null ? 0 : entry.getKey(), + return getSetTreeItem(type, + dataSourceId, + entry.getKey(), entry.getKey() == null ? nullSetName : entry.getKey(), - entry.getValue()); + TreeDisplayCount.getDeterminate(entry.getValue())); }) .collect(Collectors.toList()); return new TreeResultsDTO<>(allSets); } + private TreeItemDTO getSetTreeItem(BlackboardArtifact.Type type, + Long dataSourceId, String setName, String displayName, TreeDisplayCount displayCount) { + + return new TreeItemDTO<>( + type.getTypeName(), + new AnalysisResultSetSearchParam(type, dataSourceId, setName), + setName == null ? 0 : setName, + displayName, + displayCount); + } + /** * Compares set strings to properly order for the tree. * @@ -547,13 +600,15 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { break; } - items.add(new TreeItemDTO<>( + TreeItemDTO treeItem = new TreeItemDTO<>( "KEYWORD_SEARCH_TERMS", new KeywordSearchTermParams(setName, searchTerm, TskData.KeywordSearchQueryType.valueOf(searchType), hasChildren, dataSourceId), searchTermModified, searchTermModified, - count - )); + TreeDisplayCount.getDeterminate(count) + ); + + items.add(treeItem); } } catch (SQLException ex) { logger.log(Level.WARNING, "An error occurred while fetching results from result set.", ex); @@ -642,7 +697,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { new KeywordMatchParams(setName, regexStr, keyword, searchType, dataSourceId), keyword, keyword == null ? "" : keyword, - count)); + TreeDisplayCount.getDeterminate(count))); } } catch (SQLException ex) { logger.log(Level.WARNING, "An error occurred while fetching results from result set.", ex); @@ -655,35 +710,163 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } } - /** - * Handles basic functionality of fetching and paging of analysis results. - */ - static abstract class AbstractAnalysisResultFetcher extends DAOFetcher { + @Override + void clearCaches() { + this.analysisResultCache.invalidateAll(); + this.keywordHitCache.invalidateAll(); + this.setHitCache.invalidateAll(); + this.handleIngestComplete(); + } - /** - * Main constructor. - * - * @param params Parameters to handle fetching of data. - */ - public AbstractAnalysisResultFetcher(T params) { - super(params); - } + @Override + Set processEvent(PropertyChangeEvent evt) { + // get a grouping of artifacts mapping the artifact type id to data source id. 
+ Map> analysisResultMap = new HashMap<>(); + Map, Set> setMap = new HashMap<>(); + Map> keywordHitsMap = new HashMap<>(); - @Override - public boolean isRefreshRequired(PropertyChangeEvent evt) { - ModuleDataEvent dataEvent = this.getModuleDataFromEvt(evt); - if (dataEvent == null) { - return false; + ModuleDataEvent dataEvt = DAOEventUtils.getModuelDataFromArtifactEvent(evt); + if (dataEvt != null) { + for (BlackboardArtifact art : dataEvt.getArtifacts()) { + try { + if (art.getArtifactTypeID() == BlackboardArtifact.Type.TSK_KEYWORD_HIT.getTypeID()) { + // GVDTODO handle keyword hits + } else if (STANDARD_SET_TYPES.contains(art.getArtifactTypeID())) { + BlackboardAttribute setAttr = art.getAttribute(BlackboardAttribute.Type.TSK_SET_NAME); + String setName = setAttr == null ? null : setAttr.getValueString(); + setMap.computeIfAbsent(Pair.of(art.getType(), setName), (k) -> new HashSet<>()) + .add(art.getDataSourceObjectID()); + + } else if (BlackboardArtifact.Category.ANALYSIS_RESULT.equals(art.getType().getCategory())) { + analysisResultMap.computeIfAbsent(art.getType(), (k) -> new HashSet<>()) + .add(art.getDataSourceObjectID()); + } + } catch (TskCoreException ex) { + logger.log(Level.WARNING, "Unable to fetch necessary information for artifact id: " + art.getId(), ex); + } } - - return MainDAO.getInstance().getAnalysisResultDAO().isAnalysisResultsInvalidating(this.getParameters(), dataEvent); } + + // don't continue if no relevant items found + if (analysisResultMap.isEmpty() && setMap.isEmpty() && keywordHitsMap.isEmpty()) { + return Collections.emptySet(); + } + + clearRelevantCacheEntries(analysisResultMap, setMap); + + List daoEvents = getResultViewEvents(analysisResultMap, setMap); + Collection treeEvents = this.treeCounts.enqueueAll(daoEvents).stream() + .map(arEvt -> getTreeEvent(arEvt, false)) + .collect(Collectors.toList()); + + return Stream.of(daoEvents, treeEvents) + .flatMap(lst -> lst.stream()) + .collect(Collectors.toSet()); + } + + /** + * Generates result view events from a digest of Autopsy events. + * + * @param analysisResultMap Contains the analysis results that do not use a + * set name. A mapping of analysis result type ids + * to data sources where the results were created. + * @param resultsWithSetMap Contains the analysis results that do use a set + * name. A mapping of (analysis result type id, set + * name) to data sources where results were + * created. + * + * @return The list of DAO events. + */ + private List getResultViewEvents(Map> analysisResultMap, Map, Set> resultsWithSetMap) { + Stream analysisResultEvts = analysisResultMap.entrySet().stream() + .flatMap(entry -> entry.getValue().stream().map(dsId -> new AnalysisResultEvent(entry.getKey(), dsId))); + + Stream analysisResultSetEvts = resultsWithSetMap.entrySet().stream() + .flatMap(entry -> entry.getValue().stream().map(dsId -> new AnalysisResultSetEvent(entry.getKey().getRight(), entry.getKey().getLeft(), dsId))); + + // GVDTODO handle keyword hits + return Stream.of(analysisResultEvts, analysisResultSetEvts) + .flatMap(s -> s) + .collect(Collectors.toList()); + } + + /** + * Clears cache entries given the provided digests of Autopsy events. + * + * @param analysisResultMap Contains the analysis results that do not use a + * set name. A mapping of analysis result type ids + * to data sources where the results were created. + * @param resultsWithSetMap Contains the analysis results that do use a set + * name. 
A mapping of (analysis result type id, set + * name) to data sources where results were + * created. + */ + private void clearRelevantCacheEntries(Map> analysisResultMap, Map, Set> resultsWithSetMap) { + ConcurrentMap, AnalysisResultTableSearchResultsDTO> arConcurrentMap = this.analysisResultCache.asMap(); + arConcurrentMap.forEach((k, v) -> { + BlackboardArtifactSearchParam searchParam = k.getParamData(); + Set dsIds = analysisResultMap.get(searchParam.getArtifactType()); + if (dsIds != null && (searchParam.getDataSourceId() == null || dsIds.contains(searchParam.getDataSourceId()))) { + arConcurrentMap.remove(k); + } + }); + + ConcurrentMap, AnalysisResultTableSearchResultsDTO> setConcurrentMap = this.setHitCache.asMap(); + setConcurrentMap.forEach((k, v) -> { + AnalysisResultSetSearchParam searchParam = k.getParamData(); + Set dsIds = resultsWithSetMap.get(Pair.of(searchParam.getArtifactType(), searchParam.getSetName())); + if (dsIds != null && (searchParam.getDataSourceId() == null || dsIds.contains(searchParam.getDataSourceId()))) { + setConcurrentMap.remove(k); + } + }); + + // GVDTODO handle clearing cache for keyword search hits + // private final Cache, AnalysisResultTableSearchResultsDTO> keywordHitCache = CacheBuilder.newBuilder().maximumSize(1000).build(); + } + + /** + * Creates a TreeEvent instance based on the analysis result event and + * whether or not this event should trigger a full refresh of counts. + * + * @param arEvt The analysis result event. + * @param shouldRefresh Whether or not this tree event should trigger a full + * refresh of counts. + * + * @return The tree event. + */ + private TreeEvent getTreeEvent(AnalysisResultEvent arEvt, boolean shouldRefresh) { + // GVDTODO handle keyword items when integrated + if (arEvt instanceof AnalysisResultSetEvent) { + AnalysisResultSetEvent setEvt = (AnalysisResultSetEvent) arEvt; + return new TreeEvent(getSetTreeItem(setEvt.getArtifactType(), setEvt.getDataSourceId(), + setEvt.getSetName(), setEvt.getSetName() == null ? "" : setEvt.getSetName(), + shouldRefresh ? TreeDisplayCount.UNSPECIFIED : TreeDisplayCount.INDETERMINATE), + shouldRefresh); + } else { + return new TreeEvent(getTreeItem(arEvt.getArtifactType(), arEvt.getDataSourceId(), + shouldRefresh ? TreeDisplayCount.UNSPECIFIED : TreeDisplayCount.INDETERMINATE), + shouldRefresh); + } + } + + @Override + Set handleIngestComplete() { + return this.treeCounts.flushEvents().stream() + .map(arEvt -> getTreeEvent(arEvt, true)) + .collect(Collectors.toSet()); + } + + @Override + Set shouldRefreshTree() { + return this.treeCounts.getEventTimeouts().stream() + .map(arEvt -> getTreeEvent(arEvt, true)) + .collect(Collectors.toSet()); } /** * Handles fetching and paging of analysis results. */ - public static class AnalysisResultFetcher extends AbstractAnalysisResultFetcher { + public static class AnalysisResultFetcher extends DAOFetcher { /** * Main constructor. 
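Aside on the Guava idiom used by clearRelevantCacheEntries() above: Cache.asMap() returns a live ConcurrentMap view of the cache, so entries can be removed while iterating with forEach without a ConcurrentModificationException. Note that each cache must be pruned through its own view; calling remove() on a different cache's view is a silent no-op because the keys can never match. A minimal, self-contained sketch of the pattern follows; the String keys and the type/data-source encoding are illustrative stand-ins, not the Autopsy types.

    import com.google.common.cache.Cache;
    import com.google.common.cache.CacheBuilder;
    import java.util.Collections;
    import java.util.Set;
    import java.util.concurrent.ConcurrentMap;

    public class CacheInvalidationSketch {

        public static void main(String[] args) {
            // Same construction style as the DAO caches above.
            Cache<String, String> cache = CacheBuilder.newBuilder().maximumSize(1000).build();
            cache.put("TSK_HASHSET_HIT/ds1", "cached table page");
            cache.put("TSK_KEYWORD_HIT/ds1", "cached table page");

            // Pretend an event reported new hashset hits on data source 1.
            Set<String> invalidatedTypes = Collections.singleton("TSK_HASHSET_HIT");

            // asMap() is a live view: removing from it removes from the cache.
            ConcurrentMap<String, String> view = cache.asMap();
            view.forEach((key, value) -> {
                String type = key.split("/")[0];
                if (invalidatedTypes.contains(type)) {
                    view.remove(key); // must be the view of the cache being iterated
                }
            });

            System.out.println(cache.asMap().keySet()); // [TSK_KEYWORD_HIT/ds1]
        }
    }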
@@ -694,16 +877,25 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { super(params); } + protected AnalysisResultDAO getDAO() { + return MainDAO.getInstance().getAnalysisResultDAO(); + } + @Override - public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return MainDAO.getInstance().getAnalysisResultDAO().getAnalysisResultsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { + return getDAO().getAnalysisResultsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize); + } + + @Override + public boolean isRefreshRequired(DAOEvent evt) { + return getDAO().isAnalysisResultsInvalidating(this.getParameters(), evt); } } /** * Handles fetching and paging of hashset hits. */ - public static class AnalysisResultSetFetcher extends AbstractAnalysisResultFetcher { + public static class AnalysisResultSetFetcher extends DAOFetcher { /** * Main constructor. @@ -714,16 +906,25 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { super(params); } + protected AnalysisResultDAO getDAO() { + return MainDAO.getInstance().getAnalysisResultDAO(); + } + @Override - public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return MainDAO.getInstance().getAnalysisResultDAO().getAnalysisResultSetHits(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { + return getDAO().getAnalysisResultSetHits(this.getParameters(), pageIdx * pageSize, (long) pageSize); + } + + @Override + public boolean isRefreshRequired(DAOEvent evt) { + return getDAO().isAnalysisResultsSetInvalidating(this.getParameters(), evt); } } /** * Handles fetching and paging of keyword hits. */ - public static class KeywordHitResultFetcher extends AbstractAnalysisResultFetcher { + public static class KeywordHitResultFetcher extends DAOFetcher { /** * Main constructor. 
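The fetchers' getSearchResults(pageSize, pageIdx) methods above all perform the same conversion: a page index and page size become the DAO's (startItem, maxCount) pair via pageIdx * pageSize. A self-contained sketch of that arithmetic, using stream skip/limit over an in-memory list as a stand-in for the DAO's database paging:

    import java.util.List;
    import java.util.stream.Collectors;
    import java.util.stream.LongStream;

    public class PagingSketch {

        // Cuts one page out of a full result list, mirroring a (startItem, maxCount) request.
        static <T> List<T> getPage(List<T> allItems, long startItem, Long maxCount) {
            return allItems.stream()
                    .skip(startItem)
                    .limit(maxCount == null ? allItems.size() : maxCount)
                    .collect(Collectors.toList());
        }

        public static void main(String[] args) {
            List<Long> allItems = LongStream.range(0, 95).boxed().collect(Collectors.toList());
            int pageSize = 50;
            int pageIdx = 1;
            // Same conversion the fetchers use: startItem = pageIdx * pageSize.
            List<Long> page = getPage(allItems, (long) pageIdx * pageSize, (long) pageSize);
            System.out.println(page.size() + " items, first = " + page.get(0)); // 45 items, first = 50
        }
    }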
@@ -734,9 +935,19 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { super(params); } + protected AnalysisResultDAO getDAO() { + return MainDAO.getInstance().getAnalysisResultDAO(); + } + @Override - public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return MainDAO.getInstance().getAnalysisResultDAO().getKeywordHitsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { + return getDAO().getKeywordHitsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize); + } + + @Override + public boolean isRefreshRequired(DAOEvent evt) { + // GVDTODO + return true; } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultTableSearchResultsDTO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultTableSearchResultsDTO.java index 300d8004a0..de243f83ec 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultTableSearchResultsDTO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultTableSearchResultsDTO.java @@ -27,11 +27,12 @@ import org.sleuthkit.datamodel.BlackboardArtifact; public class AnalysisResultTableSearchResultsDTO extends BaseSearchResultsDTO { private static final String TYPE_ID = "ANALYSIS_RESULT"; + private static final String SIGNATURE = "analysisresult"; private final BlackboardArtifact.Type artifactType; public AnalysisResultTableSearchResultsDTO(BlackboardArtifact.Type artifactType, List columns, List items, long startItem, long totalResultsCount) { - super(TYPE_ID, artifactType.getDisplayName(), columns, items, startItem, totalResultsCount); + super(TYPE_ID, artifactType.getDisplayName(), columns, items, SIGNATURE, startItem, totalResultsCount); this.artifactType = artifactType; } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BaseSearchResultsDTO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BaseSearchResultsDTO.java index 62c3d09410..767a3f15e3 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BaseSearchResultsDTO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BaseSearchResultsDTO.java @@ -31,18 +31,20 @@ public class BaseSearchResultsDTO implements SearchResultsDTO { private final List items; private final long totalResultsCount; private final long startItem; + private final String signature; - public BaseSearchResultsDTO(String typeId, String displayName, List columns, List items) { - this(typeId, displayName, columns, items, 0, items == null ? 0 : items.size()); + public BaseSearchResultsDTO(String typeId, String displayName, List columns, List items, String signature) { + this(typeId, displayName, columns, items, signature, 0, items == null ? 
0 : items.size()); } - public BaseSearchResultsDTO(String typeId, String displayName, List columns, List items, long startItem, long totalResultsCount) { + public BaseSearchResultsDTO(String typeId, String displayName, List columns, List items, String signature, long startItem, long totalResultsCount) { this.typeId = typeId; this.displayName = displayName; this.columns = columns; this.items = items; this.startItem = startItem; this.totalResultsCount = totalResultsCount; + this.signature = signature; } @Override @@ -74,4 +76,9 @@ public class BaseSearchResultsDTO implements SearchResultsDTO { public long getStartItem() { return startItem; } + + @Override + public String getSignature() { + return signature; + } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BlackboardArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BlackboardArtifactDAO.java index 12f5c5703e..3c06da7ddc 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BlackboardArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BlackboardArtifactDAO.java @@ -11,7 +11,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.concurrent.ExecutionException; import java.util.logging.Level; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -71,7 +70,7 @@ import org.sleuthkit.datamodel.TskCoreException; "BlackboardArtifactDAO.columnKeys.dataSource.displayName=Data Source", "BlackboardArtifactDAO.columnKeys.dataSource.description=Data Source" }) -abstract class BlackboardArtifactDAO { +abstract class BlackboardArtifactDAO extends AbstractDAO { private static Logger logger = Logger.getLogger(BlackboardArtifactDAO.class.getName()); @@ -149,6 +148,7 @@ abstract class BlackboardArtifactDAO { protected static Set getIgnoredTreeTypes() { return IGNORED_TYPES; } + TableData createTableData(BlackboardArtifact.Type artType, List arts) throws TskCoreException, NoCurrentCaseException { Map> artifactAttributes = new HashMap<>(); diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/Bundle.properties-MERGED index ab71f5d900..9bd4e34ab9 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/Bundle.properties-MERGED @@ -34,6 +34,9 @@ BlackboardArtifactDAO.columnKeys.score.name=Score BlackboardArtifactDAO.columnKeys.srcFile.description=Source Name BlackboardArtifactDAO.columnKeys.srcFile.displayName=Source Name BlackboardArtifactDAO.columnKeys.srcFile.name=Source Name +CommAccounts.name.text=Communication Accounts +CommAccountsDAO.fileColumns.noDescription=No Description +DataArtifactDAO_Accounts_displayName=Communication Accounts FileExtDocumentFilter_html_displayName=HTML FileExtDocumentFilter_office_displayName=Office FileExtDocumentFilter_pdf_displayName=PDF @@ -71,6 +74,8 @@ FileSystemColumnUtils.abstractFileColumns.sizeColLbl=Size FileSystemColumnUtils.abstractFileColumns.typeDirColLbl=Type(Dir) FileSystemColumnUtils.abstractFileColumns.typeMetaColLbl=Type(Meta) FileSystemColumnUtils.abstractFileColumns.useridColLbl=UserID +FileSystemColumnUtils.getContentName.dotDir=[current folder] +FileSystemColumnUtils.getContentName.dotDotDir=[parent folder] FileSystemColumnUtils.imageColumns.devID=Device ID FileSystemColumnUtils.imageColumns.sectorSize=Sector Size (Bytes) FileSystemColumnUtils.imageColumns.size=Size (Bytes) diff --git 
a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java index 720ff4ece6..186817ab24 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java @@ -22,18 +22,33 @@ import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import java.beans.PropertyChangeEvent; import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Collections; import java.util.Comparator; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; +import java.util.logging.Level; import java.util.stream.Collectors; import java.util.stream.Stream; +import org.apache.commons.lang3.tuple.Pair; import org.openide.util.NbBundle; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; -import org.sleuthkit.autopsy.ingest.IngestManager; +import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeDisplayCount; +import org.sleuthkit.autopsy.mainui.datamodel.events.CommAccountsEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventUtils; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeCounts; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.Account; import org.sleuthkit.datamodel.Blackboard; @@ -43,16 +58,18 @@ import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; /** - * Provides information to populate the results viewer for data in the + * Provides information to populate the results viewer for data in the * Communication Accounts section. 
*/ @Messages({"CommAccountsDAO.fileColumns.noDescription=No Description"}) -public class CommAccountsDAO { +public class CommAccountsDAO extends AbstractDAO { + private static final Logger logger = Logger.getLogger(CommAccountsDAO.class.getName()); private static final int CACHE_SIZE = Account.Type.PREDEFINED_ACCOUNT_TYPES.size(); // number of cached SearchParams sub-types private static final long CACHE_DURATION = 2; - private static final TimeUnit CACHE_DURATION_UNITS = TimeUnit.MINUTES; - private final Cache, SearchResultsDTO> searchParamsCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).expireAfterAccess(CACHE_DURATION, CACHE_DURATION_UNITS).build(); + private static final TimeUnit CACHE_DURATION_UNITS = TimeUnit.MINUTES; + private final Cache, SearchResultsDTO> searchParamsCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).expireAfterAccess(CACHE_DURATION, CACHE_DURATION_UNITS).build(); + private final TreeCounts accountCounts = new TreeCounts<>(); private static CommAccountsDAO instance = null; @@ -64,20 +81,20 @@ public class CommAccountsDAO { return instance; } - public SearchResultsDTO getCommAcounts(CommAccountsSearchParams key, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { + SleuthkitCase getCase() throws NoCurrentCaseException { + return Case.getCurrentCaseThrows().getSleuthkitCase(); + } + + public SearchResultsDTO getCommAcounts(CommAccountsSearchParams key, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException { if (key.getType() == null) { throw new IllegalArgumentException("Must have non-null type"); } else if (key.getDataSourceId() != null && key.getDataSourceId() <= 0) { throw new IllegalArgumentException("Data source id must be greater than 0 or null"); } - - SearchParams searchParams = new SearchParams<>(key, startItem, maxCount); - if (hardRefresh) { - this.searchParamsCache.invalidate(searchParams); - } + SearchParams searchParams = new SearchParams<>(key, startItem, maxCount); return searchParamsCache.get(searchParams, () -> fetchCommAccountsDTOs(searchParams)); - } + } /** * Returns a list of paged artifacts. 
@@ -103,10 +120,10 @@ public class CommAccountsDAO { Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard(); Long dataSourceId = cacheKey.getParamData().getDataSourceId(); BlackboardArtifact.Type artType = BlackboardArtifact.Type.TSK_ACCOUNT; - - if ( (cacheKey.getStartItem() == 0) // offset is zero AND - && ( (cacheKey.getMaxResultsCount() != null && currentPageSize < cacheKey.getMaxResultsCount()) // number of results is less than max - || (cacheKey.getMaxResultsCount() == null)) ) { // OR max number of results was not specified + + if ((cacheKey.getStartItem() == 0) // offset is zero AND + && ((cacheKey.getMaxResultsCount() != null && currentPageSize < cacheKey.getMaxResultsCount()) // number of results is less than max + || (cacheKey.getMaxResultsCount() == null))) { // OR max number of results was not specified return currentPageSize; } else { if (dataSourceId != null) { @@ -114,9 +131,9 @@ public class CommAccountsDAO { } else { return blackboard.getArtifactsCount(artType.getTypeID()); } - } + } } - + @NbBundle.Messages({"CommAccounts.name.text=Communication Accounts"}) private SearchResultsDTO fetchCommAccountsDTOs(SearchParams cacheKey) throws NoCurrentCaseException, TskCoreException, SQLException { @@ -125,22 +142,188 @@ public class CommAccountsDAO { Blackboard blackboard = skCase.getBlackboard(); Account.Type type = cacheKey.getParamData().getType(); Long dataSourceId = cacheKey.getParamData().getDataSourceId(); - List allArtifacts = blackboard.getArtifacts(BlackboardArtifact.Type.TSK_ACCOUNT, + List allArtifacts = blackboard.getArtifacts(BlackboardArtifact.Type.TSK_ACCOUNT, BlackboardAttribute.Type.TSK_ACCOUNT_TYPE, type.getTypeName(), dataSourceId, - false); // GVDTODO handle approved/rejected account actions - + false); // GVDTODO handle approved/rejected account actions + // get current page of artifacts List pagedArtifacts = getPaged(allArtifacts, cacheKey); - + // Populate the attributes for paged artifacts in the list. This is done using one database call as an efficient way to - // load many artifacts/attributes at once. + // load many artifacts/attributes at once. blackboard.loadBlackboardAttributes(pagedArtifacts); - + DataArtifactDAO dataArtDAO = MainDAO.getInstance().getDataArtifactsDAO(); BlackboardArtifactDAO.TableData tableData = dataArtDAO.createTableData(BlackboardArtifact.Type.TSK_ACCOUNT, pagedArtifacts); return new DataArtifactTableSearchResultsDTO(BlackboardArtifact.Type.TSK_ACCOUNT, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), allArtifacts.size()); } - + + private static TreeResultsDTO.TreeItemDTO createAccountTreeItem(Account.Type accountType, Long dataSourceId, TreeResultsDTO.TreeDisplayCount count) { + return new TreeResultsDTO.TreeItemDTO<>( + "ACCOUNTS", + new CommAccountsSearchParams(accountType, dataSourceId), + accountType.getTypeName(), + accountType.getDisplayName(), + count); + } + + /** + * Returns the accounts and their counts in the current data source if a + * data source id is provided or all accounts if data source id is null. + * + * @param dataSourceId The data source id or null for no data source filter. + * + * @return The results. 
+ * + * @throws ExecutionException + */ + public TreeResultsDTO getAccountsCounts(Long dataSourceId) throws ExecutionException { + String query = "res.account_type AS account_type, MIN(res.account_display_name) AS account_display_name, COUNT(*) AS count\n" + + "FROM (\n" + + " SELECT MIN(account_types.type_name) AS account_type, MIN(account_types.display_name) AS account_display_name\n" + + " FROM blackboard_artifacts\n" + + " LEFT JOIN blackboard_attributes ON blackboard_artifacts.artifact_id = blackboard_attributes.artifact_id\n" + + " LEFT JOIN account_types ON blackboard_attributes.value_text = account_types.type_name\n" + + " WHERE blackboard_artifacts.artifact_type_id = " + BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID() + "\n" + + " AND blackboard_attributes.attribute_type_id = " + BlackboardAttribute.Type.TSK_ACCOUNT_TYPE.getTypeID() + "\n" + + (dataSourceId != null && dataSourceId > 0 ? " AND blackboard_artifacts.data_source_obj_id = " + dataSourceId + " " : " ") + "\n" + + " -- group by artifact_id to ensure only one account type per artifact\n" + + " GROUP BY blackboard_artifacts.artifact_id\n" + + ") res\n" + + "GROUP BY res.account_type\n" + + "ORDER BY MIN(res.account_display_name)"; + + List> accountParams = new ArrayList<>(); + try { + Set indeterminateTypes = this.accountCounts.getEnqueued().stream() + .filter(evt -> dataSourceId == null || evt.getDataSourceId() == dataSourceId) + .map(evt -> evt.getAccountType()) + .collect(Collectors.toSet()); + + getCase().getCaseDbAccessManager().select(query, (resultSet) -> { + try { + while (resultSet.next()) { + String accountTypeName = resultSet.getString("account_type"); + String accountDisplayName = resultSet.getString("account_display_name"); + Account.Type accountType = new Account.Type(accountTypeName, accountDisplayName); + long count = resultSet.getLong("count"); + TreeDisplayCount treeDisplayCount = indeterminateTypes.contains(accountType) + ? TreeDisplayCount.INDETERMINATE + : TreeResultsDTO.TreeDisplayCount.getDeterminate(count); + + accountParams.add(createAccountTreeItem(accountType, dataSourceId, treeDisplayCount)); + } + } catch (SQLException ex) { + logger.log(Level.WARNING, "An error occurred while fetching artifact type counts.", ex); + } + }); + + // return results + return new TreeResultsDTO<>(accountParams); + + } catch (NoCurrentCaseException | TskCoreException ex) { + throw new ExecutionException("An error occurred while fetching data artifact counts.", ex); + } + } + + @Override + void clearCaches() { + this.searchParamsCache.invalidateAll(); + this.handleIngestComplete(); + } + + @Override + Set handleIngestComplete() { + return SubDAOUtils.getIngestCompleteEvents( + this.accountCounts, + (daoEvt) -> createAccountTreeItem(daoEvt.getAccountType(), daoEvt.getDataSourceId(), TreeResultsDTO.TreeDisplayCount.UNSPECIFIED) + ); + } + + @Override + Set shouldRefreshTree() { + return SubDAOUtils.getRefreshEvents( + this.accountCounts, + (daoEvt) -> createAccountTreeItem(daoEvt.getAccountType(), daoEvt.getDataSourceId(), TreeResultsDTO.TreeDisplayCount.UNSPECIFIED) + ); + } + + @Override + Set processEvent(PropertyChangeEvent evt) { + // get a grouping of artifacts mapping the artifact type id to data source id. 
+ ModuleDataEvent dataEvt = DAOEventUtils.getModuelDataFromArtifactEvent(evt); + if (dataEvt == null) { + return Collections.emptySet(); + } + + Map> accountTypeMap = new HashMap<>(); + + for (BlackboardArtifact art : dataEvt.getArtifacts()) { + try { + if (art.getType().getTypeID() == BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID()) { + BlackboardAttribute accountTypeAttribute = art.getAttribute(BlackboardAttribute.Type.TSK_ACCOUNT_TYPE); + if (accountTypeAttribute == null) { + continue; + } + + String accountTypeName = accountTypeAttribute.getValueString(); + if (accountTypeName == null) { + continue; + } + + accountTypeMap.computeIfAbsent(getCase().getCommunicationsManager().getAccountType(accountTypeName), (k) -> new HashSet<>()) + .add(art.getDataSourceObjectID()); + } + } catch (NoCurrentCaseException | TskCoreException ex) { + logger.log(Level.WARNING, "Unable to fetch account type for artifact with id: " + art.getId(), ex); + } + } + + // don't do anything else if no relevant events + if (accountTypeMap.isEmpty()) { + return Collections.emptySet(); + } + + SubDAOUtils.invalidateKeys(this.searchParamsCache, + (sp) -> Pair.of(sp.getType(), sp.getDataSourceId()), accountTypeMap); + + List accountEvents = new ArrayList<>(); + for (Map.Entry> entry : accountTypeMap.entrySet()) { + Account.Type accountType = entry.getKey(); + for (Long dsObjId : entry.getValue()) { + CommAccountsEvent newEvt = new CommAccountsEvent(accountType, dsObjId); + accountEvents.add(newEvt); + } + } + + Stream treeEvents = this.accountCounts.enqueueAll(accountEvents).stream() + .map(daoEvt -> new TreeEvent(createAccountTreeItem(daoEvt.getAccountType(), daoEvt.getDataSourceId(), TreeResultsDTO.TreeDisplayCount.INDETERMINATE), false)); + + return Stream.of(accountEvents.stream(), treeEvents) + .flatMap(s -> s) + .collect(Collectors.toSet()); + } + + /** + * Returns true if the DAO event could update the data stored in the + * parameters. + * + * @param parameters The parameters. + * @param evt The event. + * + * @return True if the event invalidates the parameters. + */ + private boolean isCommAcctInvalidating(CommAccountsSearchParams parameters, DAOEvent evt) { + if (evt instanceof CommAccountsEvent) { + CommAccountsEvent commEvt = (CommAccountsEvent) evt; + return (parameters.getType().getTypeName().equals(commEvt.getAccountType().getTypeName())) + && (parameters.getDataSourceId() == null || Objects.equals(parameters.getDataSourceId(), commEvt.getDataSourceId())); + } else { + return false; + } + } + /** * Handles fetching and paging of data for communication accounts. 
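The isCommAcctInvalidating() predicate above follows a convention used throughout these DAOs: a null data source id in the search parameters means "match any data source", so an event invalidates a view only when the account types agree and the data source ids are compatible. A tiny, self-contained sketch of the same predicate with plain stand-in parameters (none of these names are the Autopsy API):

    import java.util.Objects;

    public class InvalidationPredicateSketch {

        // Mirrors the null-means-all-data-sources convention described above.
        static boolean isInvalidating(String paramAccountType, Long paramDataSourceId,
                String evtAccountType, long evtDataSourceId) {
            return paramAccountType.equals(evtAccountType)
                    && (paramDataSourceId == null
                    || Objects.equals(paramDataSourceId, evtDataSourceId));
        }

        public static void main(String[] args) {
            // An event reporting new EMAIL accounts on data source 1:
            System.out.println(isInvalidating("EMAIL", null, "EMAIL", 1)); // true (all data sources)
            System.out.println(isInvalidating("EMAIL", 2L, "EMAIL", 1));   // false (different data source)
            System.out.println(isInvalidating("PHONE", null, "EMAIL", 1)); // false (different type)
        }
    }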
*/ @@ -155,53 +338,18 @@ public class CommAccountsDAO { super(params); } - @Override - public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return MainDAO.getInstance().getCommAccountsDAO().getCommAcounts(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + protected CommAccountsDAO getDAO() { + return MainDAO.getInstance().getCommAccountsDAO(); } @Override - public boolean isRefreshRequired(PropertyChangeEvent evt) { - CommAccountsSearchParams params = this.getParameters(); - String eventType = evt.getPropertyName(); + public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { + return getDAO().getCommAcounts(this.getParameters(), pageIdx * pageSize, (long) pageSize); + } - if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) { - /** - * Checking for a current case is a stop gap measure until a - * different way of handling the closing of cases is worked out. - * Currently, remote events may be received for a case that is - * already closed. - */ - try { - Case.getCurrentCaseThrows(); - /** - * Even with the check above, it is still possible that the - * case will be closed in a different thread before this - * code executes. If that happens, it is possible for the - * event to have a null oldValue. - */ - ModuleDataEvent eventData = (ModuleDataEvent) evt.getOldValue(); - if (null != eventData - && eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID()) { - - // check that the update is for the same account type - for (BlackboardArtifact artifact : eventData.getArtifacts()) { - for (BlackboardAttribute atribute : artifact.getAttributes()) { - if (atribute.getAttributeType() == BlackboardAttribute.Type.TSK_ACCOUNT_TYPE) { - if (atribute.getValueString().equals(params.getType().toString())) { - return true; - } - } - } - } - } - } catch (NoCurrentCaseException notUsed) { - // Case is closed, do nothing. - } catch (TskCoreException ex) { - // There is nothing we can do with the exception. - } - } - return false; + @Override + public boolean isRefreshRequired(DAOEvent evt) { + return getDAO().isCommAcctInvalidating(this.getParameters(), evt); } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsSearchParams.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsSearchParams.java index 91c5e272ae..8b187b15a7 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsSearchParams.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsSearchParams.java @@ -20,16 +20,18 @@ package org.sleuthkit.autopsy.mainui.datamodel; import java.util.Objects; import org.sleuthkit.datamodel.Account; +import org.sleuthkit.datamodel.BlackboardArtifact; /** * Key for accessing data about communication accounts from the DAO. 
*/ -public class CommAccountsSearchParams { +public class CommAccountsSearchParams extends DataArtifactSearchParam { private final Account.Type type; private final Long dataSourceId; public CommAccountsSearchParams(Account.Type type, Long dataSourceId) { + super(BlackboardArtifact.Type.TSK_ACCOUNT, dataSourceId); this.type = type; this.dataSourceId = dataSourceId; } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index e8436d7de9..4728c27010 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -18,20 +18,35 @@ */ package org.sleuthkit.autopsy.mainui.datamodel; +import org.sleuthkit.autopsy.mainui.datamodel.events.DataArtifactEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventUtils; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import java.beans.PropertyChangeEvent; import java.text.MessageFormat; import java.util.ArrayList; +import java.util.Collections; import java.util.Comparator; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Set; import java.util.stream.Collectors; import org.sleuthkit.autopsy.coreutils.Logger; import java.util.concurrent.ExecutionException; +import java.util.logging.Level; +import java.util.stream.Stream; +import org.apache.commons.lang3.tuple.Pair; +import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeDisplayCount; +import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeCounts; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; @@ -43,6 +58,9 @@ import org.sleuthkit.datamodel.TskCoreException; /** * DAO for providing data about data artifacts to populate the results viewer. 
diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java
index e8436d7de9..4728c27010 100644
--- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java
+++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java
@@ -18,20 +18,35 @@
  */
 package org.sleuthkit.autopsy.mainui.datamodel;
 
+import org.sleuthkit.autopsy.mainui.datamodel.events.DataArtifactEvent;
+import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent;
+import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventUtils;
 import com.google.common.cache.Cache;
 import com.google.common.cache.CacheBuilder;
 import java.beans.PropertyChangeEvent;
 import java.text.MessageFormat;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.Comparator;
+import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Set;
 import java.util.stream.Collectors;
 import org.sleuthkit.autopsy.coreutils.Logger;
 import java.util.concurrent.ExecutionException;
+import java.util.logging.Level;
+import java.util.stream.Stream;
+import org.apache.commons.lang3.tuple.Pair;
+import org.openide.util.NbBundle;
 import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
 import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
+import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeDisplayCount;
+import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO;
+import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent;
+import org.sleuthkit.autopsy.mainui.datamodel.events.TreeCounts;
 import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher;
 import org.sleuthkit.datamodel.Blackboard;
 import org.sleuthkit.datamodel.BlackboardArtifact;
@@ -43,6 +58,9 @@ import org.sleuthkit.datamodel.TskCoreException;
 
 /**
  * DAO for providing data about data artifacts to populate the results viewer.
  */
+@NbBundle.Messages({
+    "DataArtifactDAO_Accounts_displayName=Communication Accounts"
+})
 public class DataArtifactDAO extends BlackboardArtifactDAO {
 
     private static Logger logger = Logger.getLogger(DataArtifactDAO.class.getName());
@@ -65,21 +83,22 @@ public class DataArtifactDAO extends BlackboardArtifactDAO {
     }
 
     private final Cache<SearchParams<DataArtifactSearchParam>, DataArtifactTableSearchResultsDTO> dataArtifactCache = CacheBuilder.newBuilder().maximumSize(1000).build();
+    private final TreeCounts<DataArtifactEvent> treeCounts = new TreeCounts<>();
 
     private DataArtifactTableSearchResultsDTO fetchDataArtifactsForTable(SearchParams<DataArtifactSearchParam> cacheKey) throws NoCurrentCaseException, TskCoreException {
-        
+
         SleuthkitCase skCase = getCase();
         Blackboard blackboard = skCase.getBlackboard();
 
         BlackboardArtifact.Type artType = cacheKey.getParamData().getArtifactType();
 
         String pagedWhereClause = getWhereClause(cacheKey);
-        
+
         List<DataArtifact> arts = new ArrayList<>();
         arts.addAll(blackboard.getDataArtifactsWhere(pagedWhereClause));
         blackboard.loadBlackboardAttributes(arts);
-        
-        long totalResultsCount = getTotalResultsCount(cacheKey, arts.size());
-        
+
+        long totalResultsCount = getTotalResultsCount(cacheKey, arts.size());
+
         TableData tableData = createTableData(artType, arts);
         return new DataArtifactTableSearchResultsDTO(artType, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), totalResultsCount);
     }
@@ -92,7 +111,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO {
         return new DataArtifactRowDTO((DataArtifact) artifact, srcContent, linkedFile, isTimelineSupported, cellValues, id);
     }
 
-    public DataArtifactTableSearchResultsDTO getDataArtifactsForTable(DataArtifactSearchParam artifactKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException {
+    public DataArtifactTableSearchResultsDTO getDataArtifactsForTable(DataArtifactSearchParam artifactKey, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException {
         BlackboardArtifact.Type artType = artifactKey.getArtifactType();
 
         if (artType == null || artType.getCategory() != BlackboardArtifact.Category.DATA_ARTIFACT
@@ -103,19 +122,17 @@
         }
 
         SearchParams<DataArtifactSearchParam> searchParams = new SearchParams<>(artifactKey, startItem, maxCount);
-        if (hardRefresh) {
-            this.dataArtifactCache.invalidate(searchParams);
-        }
-
         return dataArtifactCache.get(searchParams, () -> fetchDataArtifactsForTable(searchParams));
     }
 
-    public boolean isDataArtifactInvalidating(DataArtifactSearchParam key, ModuleDataEvent eventData) {
-        return key.getArtifactType().equals(eventData.getBlackboardArtifactType());
-    }
-
-    public void dropDataArtifactCache() {
-        dataArtifactCache.invalidateAll();
+    private boolean isDataArtifactInvalidating(DataArtifactSearchParam key, DAOEvent eventData) {
+        if (!(eventData instanceof DataArtifactEvent)) {
+            return false;
+        } else {
+            DataArtifactEvent dataArtEvt = (DataArtifactEvent) eventData;
+            return key.getArtifactType().getTypeID() == dataArtEvt.getArtifactType().getTypeID()
+                    && (key.getDataSourceId() == null || (key.getDataSourceId() == dataArtEvt.getDataSourceId()));
+        }
     }
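
The dataArtifactCache above is a plain Guava cache with a size bound, loaded at lookup time through get(key, loader). A standalone example of the same pattern, with placeholder key and value types:

    import com.google.common.cache.Cache;
    import com.google.common.cache.CacheBuilder;
    import java.util.concurrent.ExecutionException;

    public class CacheSketch {

        public static void main(String[] args) throws ExecutionException {
            // Same configuration style as dataArtifactCache: bounded size, no expiry.
            Cache<String, String> cache = CacheBuilder.newBuilder()
                    .maximumSize(1000)
                    .build();

            // get(key, loader) runs the loader only on a cache miss.
            String value = cache.get("page-0", () -> "computed once");
            System.out.println(value);
        }
    }
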
 
     /**
@@ -133,15 +150,19 @@ public class DataArtifactDAO extends BlackboardArtifactDAO {
     public TreeResultsDTO<DataArtifactSearchParam> getDataArtifactCounts(Long dataSourceId) throws ExecutionException {
         try {
             // get row dto's sorted by display name
+            Set<BlackboardArtifact.Type> indeterminateTypes = this.treeCounts.getEnqueued().stream()
+                    .filter(evt -> dataSourceId == null || evt.getDataSourceId() == dataSourceId)
+                    .map(evt -> evt.getArtifactType())
+                    .collect(Collectors.toSet());
+
             Map<BlackboardArtifact.Type, Long> typeCounts = getCounts(BlackboardArtifact.Category.DATA_ARTIFACT, dataSourceId);
             List<TreeItemDTO<DataArtifactSearchParam>> treeItemRows = typeCounts.entrySet().stream()
                     .map(entry -> {
-                        return new TreeResultsDTO.TreeItemDTO<>(
-                                BlackboardArtifact.Category.DATA_ARTIFACT.name(),
-                                new DataArtifactSearchParam(entry.getKey(), dataSourceId),
-                                entry.getKey().getTypeID(),
-                                entry.getKey().getDisplayName(),
-                                entry.getValue());
+                        TreeDisplayCount displayCount = indeterminateTypes.contains(entry.getKey())
+                                ? TreeDisplayCount.INDETERMINATE
+                                : TreeDisplayCount.getDeterminate(entry.getValue());
+
+                        return createDataArtifactTreeItem(entry.getKey(), dataSourceId, displayCount);
                     })
                     .sorted(Comparator.comparing(countRow -> countRow.getDisplayName()))
                     .collect(Collectors.toList());
@@ -154,6 +175,96 @@ public class DataArtifactDAO extends BlackboardArtifactDAO {
         }
     }
 
+    @Override
+    void clearCaches() {
+        this.dataArtifactCache.invalidateAll();
+        this.handleIngestComplete();
+    }
+
+    @Override
+    Set<? extends DAOEvent> processEvent(PropertyChangeEvent evt) {
+        // get a grouping of artifacts mapping the artifact type id to data source id.
+        ModuleDataEvent dataEvt = DAOEventUtils.getModuelDataFromArtifactEvent(evt);
+        if (dataEvt == null) {
+            return Collections.emptySet();
+        }
+
+        Map<BlackboardArtifact.Type, Set<Long>> artifactTypeDataSourceMap = new HashMap<>();
+
+        for (BlackboardArtifact art : dataEvt.getArtifacts()) {
+            try {
+                if (BlackboardArtifact.Category.DATA_ARTIFACT.equals(art.getType().getCategory())
+                        // accounts are handled in CommAccountsDAO
+                        && art.getType().getTypeID() != BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID()) {
+
+                    artifactTypeDataSourceMap.computeIfAbsent(art.getType(), (k) -> new HashSet<>())
+                            .add(art.getDataSourceObjectID());
+                }
+            } catch (TskCoreException ex) {
+                logger.log(Level.WARNING, "Unable to fetch artifact category for artifact with id: " + art.getId(), ex);
+            }
+        }
+
+        // don't do anything else if no relevant events
+        if (artifactTypeDataSourceMap.isEmpty()) {
+            return Collections.emptySet();
+        }
+
+        SubDAOUtils.invalidateKeys(this.dataArtifactCache, (sp) -> Pair.of(sp.getArtifactType(), sp.getDataSourceId()), artifactTypeDataSourceMap);
+
+        // gather dao events based on artifacts
+        List<DataArtifactEvent> dataArtifactEvents = new ArrayList<>();
+        for (Entry<BlackboardArtifact.Type, Set<Long>> entry : artifactTypeDataSourceMap.entrySet()) {
+            BlackboardArtifact.Type artType = entry.getKey();
+            for (Long dsObjId : entry.getValue()) {
+                DataArtifactEvent newEvt = new DataArtifactEvent(artType, dsObjId);
+                dataArtifactEvents.add(newEvt);
+            }
+        }
+
+        Stream<TreeEvent> dataArtifactTreeEvents = this.treeCounts.enqueueAll(dataArtifactEvents).stream()
+                .map(daoEvt -> new TreeEvent(createDataArtifactTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.INDETERMINATE), false));
+
+        return Stream.of(dataArtifactEvents.stream(), dataArtifactTreeEvents)
+                .flatMap(s -> s)
+                .collect(Collectors.toSet());
+    }
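
SubDAOUtils.invalidateKeys is not shown in this diff; what processEvent needs from it is "drop every cached entry whose (artifact type, data source) pair was touched by the event". Against a Guava cache that can be done through the asMap() view, sketched here with hypothetical string keys:

    import com.google.common.cache.Cache;
    import com.google.common.cache.CacheBuilder;
    import java.util.function.Predicate;

    public class InvalidateSketch {

        // Remove every cached entry whose key matches the predicate.
        static <K, V> void invalidateKeys(Cache<K, V> cache, Predicate<K> affected) {
            cache.asMap().keySet().removeIf(affected);
        }

        public static void main(String[] args) {
            Cache<String, Integer> cache = CacheBuilder.newBuilder().build();
            cache.put("TSK_WEB_HISTORY/ds1", 10);
            cache.put("TSK_CALLLOG/ds1", 20);
            invalidateKeys(cache, key -> key.startsWith("TSK_CALLLOG"));
            System.out.println(cache.asMap()); // {TSK_WEB_HISTORY/ds1=10}
        }
    }
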
+
+    /**
+     * Returns the display name for an artifact type.
+     *
+     * @param artifactType The artifact type.
+     *
+     * @return The display name.
+     */
+    public String getDisplayName(BlackboardArtifact.Type artifactType) {
+        return artifactType.getTypeID() == BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID()
+                ? Bundle.DataArtifactDAO_Accounts_displayName()
+                : artifactType.getDisplayName();
+    }
+
+    private TreeItemDTO<DataArtifactSearchParam> createDataArtifactTreeItem(BlackboardArtifact.Type artifactType, Long dataSourceId, TreeDisplayCount displayCount) {
+        return new TreeResultsDTO.TreeItemDTO<>(
+                BlackboardArtifact.Category.DATA_ARTIFACT.name(),
+                new DataArtifactSearchParam(artifactType, dataSourceId),
+                artifactType.getTypeID(),
+                getDisplayName(artifactType),
+                displayCount);
+    }
+
+    @Override
+    Set<? extends DAOEvent> handleIngestComplete() {
+        return SubDAOUtils.getIngestCompleteEvents(this.treeCounts,
+                (daoEvt) -> createDataArtifactTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.UNSPECIFIED));
+    }
+
+    @Override
+    Set<TreeEvent> shouldRefreshTree() {
+        return SubDAOUtils.getRefreshEvents(this.treeCounts,
+                (daoEvt) -> createDataArtifactTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.UNSPECIFIED));
+    }
+
     /*
      * Handles fetching and paging of data artifacts.
      */
@@ -168,19 +279,18 @@ public class DataArtifactDAO extends BlackboardArtifactDAO {
             super(params);
         }
 
-        @Override
-        public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException {
-            return MainDAO.getInstance().getDataArtifactsDAO().getDataArtifactsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh);
+        protected DataArtifactDAO getDAO() {
+            return MainDAO.getInstance().getDataArtifactsDAO();
         }
 
         @Override
-        public boolean isRefreshRequired(PropertyChangeEvent evt) {
-            ModuleDataEvent dataEvent = this.getModuleDataFromEvt(evt);
-            if (dataEvent == null) {
-                return false;
-            }
+        public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException {
+            return getDAO().getDataArtifactsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize);
+        }
 
-            return MainDAO.getInstance().getDataArtifactsDAO().isDataArtifactInvalidating(this.getParameters(), dataEvent);
+        @Override
+        public boolean isRefreshRequired(DAOEvent evt) {
+            return getDAO().isDataArtifactInvalidating(this.getParameters(), evt);
         }
     }
 }
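
The tree counts in this DAO move through three observable states: a determinate number, INDETERMINATE while matching events are still enqueued in treeCounts, and UNSPECIFIED once ingest completes and the node is told to refresh. A toy rendering of those states (the enum here is illustrative; the real TreeDisplayCount lives in TreeResultsDTO):

    public class TreeCountSketch {

        enum State { DETERMINATE, INDETERMINATE, UNSPECIFIED }

        static String render(String name, State state, long count) {
            switch (state) {
                case DETERMINATE:
                    return name + " (" + count + ")"; // settled count
                case INDETERMINATE:
                    return name + " (...)";           // events still queued
                case UNSPECIFIED:
                default:
                    return name;                      // no count displayed
            }
        }

        public static void main(String[] args) {
            System.out.println(render("Web History", State.INDETERMINATE, 0));
            System.out.println(render("Web History", State.DETERMINATE, 42));
        }
    }
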
diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactTableSearchResultsDTO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactTableSearchResultsDTO.java
index 0ddb4ce52e..45cb791826 100644
--- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactTableSearchResultsDTO.java
+++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactTableSearchResultsDTO.java
@@ -27,11 +27,12 @@ import org.sleuthkit.datamodel.BlackboardArtifact;
 public class DataArtifactTableSearchResultsDTO extends BaseSearchResultsDTO {
 
     private static final String TYPE_ID = "DATA_ARTIFACT";
+    private static final String SIGNATURE = "dataartifact";
 
     private final BlackboardArtifact.Type artifactType;
 
     public DataArtifactTableSearchResultsDTO(BlackboardArtifact.Type artifactType, List<ColumnKey> columns, List<RowDTO> items, long startItem, long totalResultsCount) {
-        super(TYPE_ID, artifactType.getDisplayName(), columns, items, startItem, totalResultsCount);
+        super(TYPE_ID, artifactType.getDisplayName(), columns, items, SIGNATURE, startItem, totalResultsCount);
         this.artifactType = artifactType;
     }
diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileRowDTO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileRowDTO.java
index 4ef8cfb935..ddf98f538f 100644
--- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileRowDTO.java
+++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileRowDTO.java
@@ -19,6 +19,7 @@
 package org.sleuthkit.autopsy.mainui.datamodel;
 
 import java.util.List;
+import org.sleuthkit.autopsy.mainui.datamodel.MediaTypeUtils.ExtensionMediaType;
 import org.sleuthkit.datamodel.AbstractFile;
 import org.sleuthkit.datamodel.LayoutFile;
 import org.sleuthkit.datamodel.SlackFile;
@@ -29,10 +30,6 @@ import org.sleuthkit.datamodel.TskData;
  */
 public class FileRowDTO extends BaseRowDTO {
 
-    public enum ExtensionMediaType {
-        IMAGE, VIDEO, AUDIO, DOC, EXECUTABLE, TEXT, WEB, PDF, ARCHIVE, UNCATEGORIZED
-    }
-
     private static String TYPE_ID = "FILE";
 
     public static String getTypeIdForClass() {
diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSizeFilter.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSizeFilter.java
new file mode 100644
index 0000000000..246875fffb
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSizeFilter.java
@@ -0,0 +1,69 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.mainui.datamodel;
+
+/**
+ * Filters by file size for views.
+ */
+public enum FileSizeFilter {
+    SIZE_50_200(0, "SIZE_50_200", "50 - 200MB", 50_000_000L, 200_000_000L), //NON-NLS
+    SIZE_200_1000(1, "SIZE_200_1GB", "200MB - 1GB", 200_000_000L, 1_000_000_000L), //NON-NLS
+    SIZE_1000_(2, "SIZE_1000+", "1GB+", 1_000_000_000L, null); //NON-NLS
+
+    private final int id;
+    private final String name;
+    private final String displayName;
+    private long minBound;
+    private Long maxBound;
+
+    private FileSizeFilter(int id, String name, String displayName, long minBound, Long maxBound) {
+        this.id = id;
+        this.name = name;
+        this.displayName = displayName;
+        this.minBound = minBound;
+        this.maxBound = maxBound;
+    }
+
+    public String getName() {
+        return this.name;
+    }
+
+    public int getId() {
+        return this.id;
+    }
+
+    public String getDisplayName() {
+        return this.displayName;
+    }
+
+    /**
+     * @return The minimum inclusive bound (non-null).
+     */
+    public long getMinBound() {
+        return minBound;
+    }
+
+    /**
+     * @return The maximum exclusive bound (if null, no upper limit).
+     */
+    public Long getMaxBound() {
+        return maxBound;
+    }
+
+}
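
Each FileSizeFilter value pairs an inclusive lower bound with an exclusive, nullable upper bound, which maps directly onto a SQL range condition. A hedged sketch of how a view query might consume the two getters (the sizeClause helper is illustrative, not part of this change):

    public class SizeClauseSketch {

        // Inclusive minimum, exclusive maximum; a null maximum means "no upper limit".
        static String sizeClause(long minBound, Long maxBound) {
            String clause = "size >= " + minBound;
            if (maxBound != null) {
                clause += " AND size < " + maxBound;
            }
            return clause;
        }

        public static void main(String[] args) {
            System.out.println(sizeClause(50_000_000L, 200_000_000L)); // SIZE_50_200
            System.out.println(sizeClause(1_000_000_000L, null));      // SIZE_1000_
        }
    }
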
diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemColumnUtils.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemColumnUtils.java
index 82648f4db2..9825dfb13d 100644
--- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemColumnUtils.java
+++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemColumnUtils.java
@@ -18,29 +18,41 @@
  */
 package org.sleuthkit.autopsy.mainui.datamodel;
 
+import java.sql.ResultSet;
+import java.sql.SQLException;
 import java.util.Arrays;
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.Iterator;
 import java.util.List;
 import java.util.logging.Level;
 import org.openide.util.NbBundle.Messages;
 import org.apache.commons.lang3.StringUtils;
+import org.openide.util.NbBundle;
+import org.sleuthkit.autopsy.core.UserPreferences;
+import org.sleuthkit.autopsy.casemodule.Case;
+import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
 import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.autopsy.coreutils.TimeZoneUtils;
+import org.sleuthkit.autopsy.datamodel.ContentUtils;
 import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.BlackboardArtifact;
 import org.sleuthkit.datamodel.Content;
 import org.sleuthkit.datamodel.FileSystem;
 import org.sleuthkit.datamodel.Host;
 import org.sleuthkit.datamodel.Image;
 import org.sleuthkit.datamodel.Pool;
+import org.sleuthkit.datamodel.SlackFile;
+import org.sleuthkit.datamodel.SleuthkitCase;
 import org.sleuthkit.datamodel.Volume;
 import org.sleuthkit.datamodel.VolumeSystem;
 import org.sleuthkit.datamodel.TskCoreException;
+import org.sleuthkit.datamodel.TskData;
 
 /**
  * Utility class for creating consistent table data.
  */
-class FileSystemColumnUtils {
+public class FileSystemColumnUtils {
 
     private static final Logger logger = Logger.getLogger(FileSystemColumnUtils.class.getName());
@@ -151,9 +163,10 @@ class FileSystemColumnUtils {
      *
      * @param content The Content object.
      *
-     * @return The type corresponding to the content; UNSUPPORTED if the content will not be displayed
+     * @return The type corresponding to the content; UNSUPPORTED if the
+     *         content will not be displayed in the file system section of the tree.
      */
-    private static ContentType getContentType(Content content) {
+    private static ContentType getDisplayableContentType(Content content) {
         if (content instanceof Image) {
             return ContentType.IMAGE;
         } else if (content instanceof Volume) {
@@ -167,9 +180,11 @@ class FileSystemColumnUtils {
     }
 
     /**
-     * Check whether a given content object should be displayed.
+     * Check whether a given content object should be displayed in the
+     * file system section of the tree.
      * We can display an object if ContentType is not UNSUPPORTED
-     * and if it is not the root directory.
+     * and if it is not the root directory. We cannot display
+     * file systems, volume systems, artifacts, etc.
      *
      * @param content The content.
      *
@@ -177,9 +192,15 @@ class FileSystemColumnUtils {
      */
     static boolean isDisplayable(Content content) {
         if (content instanceof AbstractFile) {
+            // .. directories near the top of the directory structure can
+            // pass the isRoot() check, so first check if the name is empty
+            // (real root directories will have a blank name field)
+            if (!content.getName().isEmpty()) {
+                return true;
+            }
             return ! ((AbstractFile)content).isRoot();
         }
-        return (getContentType(content) != ContentType.UNSUPPORTED);
+        return (getDisplayableContentType(content) != ContentType.UNSUPPORTED);
     }
 
     /**
@@ -194,7 +215,7 @@ class FileSystemColumnUtils {
     static List<ContentType> getDisplayableTypesForContentList(List<Content> contentList) {
         List<ContentType> displayableTypes = new ArrayList<>();
         for (Content content : contentList) {
-            ContentType type = getContentType(content);
+            ContentType type = getDisplayableContentType(content);
             if (type != ContentType.UNSUPPORTED && ! displayableTypes.contains(type)) {
                 displayableTypes.add(type);
             }
@@ -288,11 +309,25 @@ class FileSystemColumnUtils {
             return pool.getType().getName(); // We currently use the type name for both the name and type fields
         } else if (content instanceof AbstractFile) {
             AbstractFile file = (AbstractFile)content;
-            return file.getName(); // GVDTODO handle . and .. from getContentDisplayName()
+            return convertDotDirName(file);
         }
         return content.getName();
     }
+
+    @NbBundle.Messages({
+        "FileSystemColumnUtils.getContentName.dotDir=[current folder]",
+        "FileSystemColumnUtils.getContentName.dotDotDir=[parent folder]",
+    })
+    public static String convertDotDirName(AbstractFile file) {
+        if (file.getName().equals("..")) {
+            return Bundle.FileSystemColumnUtils_getContentName_dotDotDir();
+        } else if (file.getName().equals(".")) {
+            return Bundle.FileSystemColumnUtils_getContentName_dotDir();
+        }
+        return file.getName();
+    }
+
     /**
      * Get the column keys for an abstract file object.
      * Only use this method if all rows contain AbstractFile objects.
@@ -459,7 +494,7 @@ class FileSystemColumnUtils {
      *
      * @return The display name.
      */
-    private static String getVolumeDisplayName(Volume vol) {
+    public static String getVolumeDisplayName(Volume vol) {
         // set name, display name, and icon
         String volName = "vol" + Long.toString(vol.getAddr());
         long end = vol.getStart() + (vol.getLength() - 1);
@@ -481,7 +516,9 @@ class FileSystemColumnUtils {
     /**
      * Get the content that should be displayed in the table based on the given object.
      * Algorithm:
-     * - If content is already displayable, return it
+     * - If content is known and known files are being hidden, return an empty list
+     * - If content is a slack file and slack files are being hidden, return an empty list
+     * - If content is a displayable type, return it
      * - If content is a volume system, return its displayable children
     * - If content is a file system, return the displayable children of the root folder
     * - If content is the root folder, return the displayable children of the root folder
      *
      * @return List of content to add to the table.
      */
-    static List<Content> getNextDisplayableContent(Content content) throws TskCoreException {
+    static List<Content> getDisplayableContentForTable(Content content) throws TskCoreException {
+        if (content instanceof AbstractFile) {
+            AbstractFile file = (AbstractFile)content;
+            // Skip known files if requested
+            if (UserPreferences.hideKnownFilesInDataSourcesTree()
+                    && file.getKnown().equals(TskData.FileKnown.KNOWN)) {
+                return new ArrayList<>();
+            }
+
+            // Skip slack files if requested
+            if (UserPreferences.hideSlackFilesInDataSourcesTree()
+                    && file instanceof SlackFile) {
+                return new ArrayList<>();
+            }
+        }
+
+        return getDisplayableContentForTableAndTree(content);
+    }
+
+    /**
+     * Get the content that should be displayed in the table or tree based on the given object.
+     * Algorithm:
+     * - If content is a displayable type, return it
+     * - If content is a volume system, return its displayable children
+     * - If content is a file system, return the displayable children of the root folder
+     * - If content is the root folder, return the displayable children of the root folder
+     *
+     * @param content The base content.
+     *
+     * @return List of content to add to the table/tree.
+     *
+     * @throws TskCoreException
+     */
+    private static List<Content> getDisplayableContentForTableAndTree(Content content) throws TskCoreException {
         // If the given content is displayable, return it
         if (FileSystemColumnUtils.isDisplayable(content)) {
             return Arrays.asList(content);
@@ -541,4 +611,83 @@ class FileSystemColumnUtils {
         return new ColumnKey(name, name, Bundle.FileSystemColumnUtils_noDescription());
     }
 
+    /**
+     * Get the children of a given content ID that will be visible in the tree.
+     *
+     * @param contentId The ID of the parent content.
+     *
+     * @return The visible children of the given content.
+     *
+     * @throws TskCoreException
+     * @throws NoCurrentCaseException
+     */
+    public static List<Content> getVisibleTreeNodeChildren(Long contentId) throws TskCoreException, NoCurrentCaseException {
+        SleuthkitCase skCase = Case.getCurrentCaseThrows().getSleuthkitCase();
+        Content content = skCase.getContentById(contentId);
+        List<Content> originalChildren = content.getChildren();
+
+        // First, advance past anything we don't display (volume systems, file systems, root folders)
+        List<Content> treeChildren = new ArrayList<>();
+        for (Content child : originalChildren) {
+            treeChildren.addAll(FileSystemColumnUtils.getDisplayableContentForTableAndTree(child));
+        }
+
+        // Filter out the . and .. directories
+        for (Iterator<Content> iter = treeChildren.listIterator(); iter.hasNext(); ) {
+            Content c = iter.next();
+            if ((c instanceof AbstractFile) && ContentUtils.isDotDirectory((AbstractFile)c)) {
+                iter.remove();
+            }
+        }
+
+        // Filter out any files without children
+        for (Iterator<Content> iter = treeChildren.listIterator(); iter.hasNext(); ) {
+            Content c = iter.next();
+            if (c instanceof AbstractFile && (! hasDisplayableContentChildren((AbstractFile)c))) {
+                iter.remove();
+            }
+        }
+
+        return treeChildren;
+    }
+
+    /**
+     * Check whether a file has displayable children.
+     *
+     * @param file The file to check.
+     *
+     * @return True if the file has displayable children, false otherwise.
+     */
+    private static boolean hasDisplayableContentChildren(AbstractFile file) {
+        if (file != null) {
+            try {
+                // If the file has no children at all, then it has no displayable children.
+                if (!file.hasChildren()) {
+                    return false;
+                }
+            } catch (TskCoreException ex) {
+                logger.log(Level.SEVERE, "Error checking if the node has children for file with ID: " + file.getId(), ex); //NON-NLS
+                return false;
+            }
+
+            String query = "SELECT COUNT(obj_id) AS count FROM "
+                    + " ( SELECT obj_id FROM tsk_objects WHERE par_obj_id = " + file.getId() + " AND type = "
+                    + TskData.ObjectType.ARTIFACT.getObjectType()
+                    + " INTERSECT SELECT artifact_obj_id FROM blackboard_artifacts WHERE obj_id = " + file.getId()
+                    + " AND (artifact_type_id = " + BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID()
+                    + " OR artifact_type_id = " + BlackboardArtifact.ARTIFACT_TYPE.TSK_MESSAGE.getTypeID() + ") "
+                    + " UNION SELECT obj_id FROM tsk_objects WHERE par_obj_id = " + file.getId()
+                    + " AND type = " + TskData.ObjectType.ABSTRACTFILE.getObjectType() + ") AS OBJECT_IDS"; //NON-NLS
+
+            try (SleuthkitCase.CaseDbQuery dbQuery = Case.getCurrentCaseThrows().getSleuthkitCase().executeQuery(query)) {
+                ResultSet resultSet = dbQuery.getResultSet();
+                if (resultSet.next()) {
+                    return (0 < resultSet.getInt("count"));
+                }
+            } catch (TskCoreException | SQLException | NoCurrentCaseException ex) {
+                logger.log(Level.SEVERE, "Error checking if the node has children for file with ID: " + file.getId(), ex); //NON-NLS
+            }
+        }
+        return false;
+    }
 }
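
The child-count query above is built by string concatenation, which is acceptable here only because every interpolated value is a numeric ID from the case database. For comparison, the same query shape written against plain JDBC with bind parameters (illustrative only; the CaseDbQuery wrapper used above takes a finished SQL string):

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;

    public class ChildCountSketch {

        // Mirrors hasDisplayableContentChildren's SELECT, with placeholders for the IDs.
        static boolean hasChildren(Connection conn, long fileId, int artifactObjType,
                int emailTypeId, int messageTypeId, int fileObjType) throws SQLException {
            String query = "SELECT COUNT(obj_id) AS count FROM "
                    + " ( SELECT obj_id FROM tsk_objects WHERE par_obj_id = ? AND type = ? "
                    + " INTERSECT SELECT artifact_obj_id FROM blackboard_artifacts WHERE obj_id = ? "
                    + " AND (artifact_type_id = ? OR artifact_type_id = ?) "
                    + " UNION SELECT obj_id FROM tsk_objects WHERE par_obj_id = ? AND type = ?) AS OBJECT_IDS";
            try (PreparedStatement stmt = conn.prepareStatement(query)) {
                stmt.setLong(1, fileId);
                stmt.setInt(2, artifactObjType);
                stmt.setLong(3, fileId);
                stmt.setInt(4, emailTypeId);
                stmt.setInt(5, messageTypeId);
                stmt.setLong(6, fileId);
                stmt.setInt(7, fileObjType);
                try (ResultSet rs = stmt.executeQuery()) {
                    return rs.next() && rs.getInt("count") > 0;
                }
            }
        }
    }
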
diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java
index b3fa860248..de174d3477 100644
--- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java
+++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java
@@ -20,17 +20,34 @@ package org.sleuthkit.autopsy.mainui.datamodel;
 
 import com.google.common.cache.Cache;
 import com.google.common.cache.CacheBuilder;
+import com.google.common.collect.ImmutableSet;
 import java.beans.PropertyChangeEvent;
 import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
 import java.util.Comparator;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Optional;
+import java.util.Set;
+import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
+import java.util.logging.Level;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
+import org.sleuthkit.autopsy.casemodule.events.DataSourceAddedEvent;
+import org.sleuthkit.autopsy.casemodule.events.DataSourceNameChangedEvent;
+import org.sleuthkit.autopsy.casemodule.events.HostsAddedEvent;
+import org.sleuthkit.autopsy.casemodule.events.HostsAddedToPersonEvent;
+import org.sleuthkit.autopsy.casemodule.events.HostsRemovedFromPersonEvent;
+import org.sleuthkit.autopsy.casemodule.events.HostsUpdatedEvent;
+import org.sleuthkit.autopsy.coreutils.Logger;
+import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent;
+import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventUtils;
+import static org.sleuthkit.autopsy.mainui.datamodel.MediaTypeUtils.getExtensionMediaType;
 import org.sleuthkit.autopsy.mainui.datamodel.ContentRowDTO.DirectoryRowDTO;
 import org.sleuthkit.autopsy.mainui.datamodel.ContentRowDTO.ImageRowDTO;
 import org.sleuthkit.autopsy.mainui.datamodel.ContentRowDTO.VolumeRowDTO;
@@ -40,11 +57,17 @@ import org.sleuthkit.autopsy.mainui.datamodel.ContentRowDTO.VirtualDirectoryRowD
 import org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO.LayoutFileRowDTO;
 import org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO.SlackFileRowDTO;
 import org.sleuthkit.autopsy.mainui.datamodel.ContentRowDTO.PoolRowDTO;
-import static org.sleuthkit.autopsy.mainui.datamodel.ViewsDAO.getExtensionMediaType;
+import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeDisplayCount;
+import org.sleuthkit.autopsy.mainui.datamodel.events.FileSystemContentEvent;
+import org.sleuthkit.autopsy.mainui.datamodel.events.FileSystemHostEvent;
+import org.sleuthkit.autopsy.mainui.datamodel.events.FileSystemPersonEvent;
+import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent;
 import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher;
 import org.sleuthkit.datamodel.AbstractFile;
 import org.sleuthkit.datamodel.Content;
+import org.sleuthkit.datamodel.DataSource;
 import org.sleuthkit.datamodel.Directory;
+import org.sleuthkit.datamodel.FileSystem;
 import org.sleuthkit.datamodel.Host;
 import org.sleuthkit.datamodel.Image;
 import org.sleuthkit.datamodel.LayoutFile;
@@ -55,20 +78,40 @@ import org.sleuthkit.datamodel.Pool;
 import org.sleuthkit.datamodel.SlackFile;
 import org.sleuthkit.datamodel.SleuthkitCase;
 import org.sleuthkit.datamodel.TskCoreException;
+import org.sleuthkit.datamodel.TskDataException;
 import org.sleuthkit.datamodel.TskData;
 import org.sleuthkit.datamodel.VirtualDirectory;
 import org.sleuthkit.datamodel.Volume;
+import org.sleuthkit.datamodel.VolumeSystem;
 
 /**
  *
  */
-public class FileSystemDAO {
+public class FileSystemDAO extends AbstractDAO {
+
+    private static final Logger logger = Logger.getLogger(FileSystemDAO.class.getName());
 
     private static final int CACHE_SIZE = 15; // rule of thumb: 5 entries times number of cached SearchParams sub-types
     private static final long CACHE_DURATION = 2;
     private static final TimeUnit CACHE_DURATION_UNITS = TimeUnit.MINUTES;
-    private final Cache<SearchParams<?>, BaseSearchResultsDTO> searchParamsCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).expireAfterAccess(CACHE_DURATION, CACHE_DURATION_UNITS).build();
+
+    private static final Set<String> HOST_LEVEL_EVTS = ImmutableSet.of(
+            Case.Events.DATA_SOURCE_ADDED.toString(),
+            // this should trigger the case to be reopened
+            // Case.Events.DATA_SOURCE_DELETED.toString(),
+            Case.Events.DATA_SOURCE_NAME_CHANGED.toString(),
+            Case.Events.HOSTS_ADDED.toString(),
+            Case.Events.HOSTS_DELETED.toString(),
+            Case.Events.HOSTS_UPDATED.toString()
+    );
+
+    private static final Set<String> PERSON_LEVEL_EVTS = ImmutableSet.of(
+            Case.Events.HOSTS_ADDED_TO_PERSON.toString(),
+            Case.Events.HOSTS_REMOVED_FROM_PERSON.toString()
+    );
+
+    private final Cache<SearchParams<?>, BaseSearchResultsDTO> searchParamsCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).expireAfterAccess(CACHE_DURATION, CACHE_DURATION_UNITS).build();
+
     private static final String FILE_SYSTEM_TYPE_ID = "FILE_SYSTEM";
 
     private static FileSystemDAO instance = null;
@@ -79,26 +122,23 @@ public class FileSystemDAO {
         }
         return instance;
     }
-
-    public boolean isSystemContentInvalidating(FileSystemContentSearchParam key, Content eventContent) {
-        if(!(eventContent instanceof Content)) {
-            return false;
-        }
-
-        try {
-            return key.getContentObjectId() != eventContent.getParent().getId();
-        } catch (TskCoreException ex) {
-            // There is nothing we can do with the exception.
+
+    private boolean isSystemContentInvalidating(FileSystemContentSearchParam key, DAOEvent daoEvent) {
+        if (!(daoEvent instanceof FileSystemContentEvent)) {
             return false;
         }
+
+        FileSystemContentEvent contentEvt = (FileSystemContentEvent) daoEvent;
+
+        return contentEvt.getContentObjectId() == null || key.getContentObjectId().equals(contentEvt.getContentObjectId());
     }
-
-    public boolean isSystemHostInvalidating(FileSystemHostSearchParam key, Host eventHost) {
-        if(!(eventHost instanceof Host)) {
+
+    private boolean isSystemHostInvalidating(FileSystemHostSearchParam key, DAOEvent daoEvent) {
+        if (!(daoEvent instanceof FileSystemHostEvent)) {
             return false;
         }
-
-        return key.getHostObjectId() != eventHost.getHostId();
+
+        return key.getHostObjectId() == ((FileSystemHostEvent) daoEvent).getHostObjectId();
     }
 
     private BaseSearchResultsDTO fetchContentForTableFromContent(SearchParams<FileSystemContentSearchParam> cacheKey) throws NoCurrentCaseException, TskCoreException {
@@ -115,7 +155,7 @@ public class FileSystemDAO {
         parentName = parentContent.getName();
         for (Content content : parentContent.getChildren()) {
-            contentForTable.addAll(FileSystemColumnUtils.getNextDisplayableContent(content));
+            contentForTable.addAll(FileSystemColumnUtils.getDisplayableContentForTable(content));
         }
 
         return fetchContentForTable(cacheKey, contentForTable, parentName);
@@ -174,7 +214,7 @@ public class FileSystemDAO {
             List<Object> cellValues = FileSystemColumnUtils.getCellValuesForHost(host);
             rows.add(new BaseRowDTO(cellValues, FILE_SYSTEM_TYPE_ID, host.getHostId()));
         }
-        return new BaseSearchResultsDTO(FILE_SYSTEM_TYPE_ID, parentName, columnKeys, rows, cacheKey.getStartItem(), hostsForTable.size());
+        return new BaseSearchResultsDTO(FILE_SYSTEM_TYPE_ID, parentName, columnKeys, rows, Host.class.getName(), cacheKey.getStartItem(), hostsForTable.size());
     }
 
     private BaseSearchResultsDTO fetchContentForTable(SearchParams<?> cacheKey, List<Content> contentForTable,
@@ -229,7 +269,7 @@ public class FileSystemDAO {
                     rows.add(new FileRowDTO(
                             file,
                             file.getId(),
-                            file.getName(),
+                            FileSystemColumnUtils.convertDotDirName(file),
                            file.getNameExtension(),
                             getExtensionMediaType(file.getNameExtension()),
                             file.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.ALLOC),
@@ -237,7 +277,7 @@ public class FileSystemDAO {
                             cellValues));
             }
         }
-        return new BaseSearchResultsDTO(FILE_SYSTEM_TYPE_ID, parentName, columnKeys, rows, cacheKey.getStartItem(), contentForTable.size());
+        return new BaseSearchResultsDTO(FILE_SYSTEM_TYPE_ID, parentName, columnKeys, rows, FILE_SYSTEM_TYPE_ID, cacheKey.getStartItem(), contentForTable.size());
     }
 
     /**
@@ -260,34 +300,268 @@ public class FileSystemDAO {
         return pagedArtsStream.collect(Collectors.toList());
     }
 
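
Paging in these DAOs happens in memory after the fetch: the start item is a skip offset and the page size an optional limit. That arithmetic as a standalone sketch (the paged-stream helper ending above is the same idea applied to the fetched content):

    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    public class PagingSketch {

        // startItem is a 0-based offset; a null maxCount means "no page limit".
        static <T> List<T> getPaged(List<T> items, long startItem, Long maxCount) {
            Stream<T> pagedStream = items.stream().skip(startItem);
            if (maxCount != null) {
                pagedStream = pagedStream.limit(maxCount);
            }
            return pagedStream.collect(Collectors.toList());
        }

        public static void main(String[] args) {
            List<Integer> all = Arrays.asList(1, 2, 3, 4, 5);
            System.out.println(getPaged(all, 2, 2L)); // [3, 4]
        }
    }
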
-    public BaseSearchResultsDTO getContentForTable(FileSystemContentSearchParam objectKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException {
-
+    public BaseSearchResultsDTO getContentForTable(FileSystemContentSearchParam objectKey, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException {
         SearchParams<FileSystemContentSearchParam> searchParams = new SearchParams<>(objectKey, startItem, maxCount);
-        if (hardRefresh) {
-            searchParamsCache.invalidate(searchParams);
-        }
-
         return searchParamsCache.get(searchParams, () -> fetchContentForTableFromContent(searchParams));
     }
 
-    public BaseSearchResultsDTO getContentForTable(FileSystemHostSearchParam objectKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException {
-
+    public BaseSearchResultsDTO getContentForTable(FileSystemHostSearchParam objectKey, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException {
         SearchParams<FileSystemHostSearchParam> searchParams = new SearchParams<>(objectKey, startItem, maxCount);
-        if (hardRefresh) {
-            searchParamsCache.invalidate(searchParams);
-        }
-
         return searchParamsCache.get(searchParams, () -> fetchContentForTableFromHost(searchParams));
     }
 
-    public BaseSearchResultsDTO getHostsForTable(FileSystemPersonSearchParam objectKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException {
-
+    public BaseSearchResultsDTO getHostsForTable(FileSystemPersonSearchParam objectKey, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException {
         SearchParams<FileSystemPersonSearchParam> searchParams = new SearchParams<>(objectKey, startItem, maxCount);
-        if (hardRefresh) {
-            searchParamsCache.invalidate(searchParams);
+        return searchParamsCache.get(searchParams, () -> fetchHostsForTable(searchParams));
+    }
+
+    @Override
+    void clearCaches() {
+        this.searchParamsCache.invalidateAll();
+    }
+
+    private Long getHostFromDs(Content dataSource) {
+        if (!(dataSource instanceof DataSource)) {
+            return null;
         }
-        return searchParamsCache.get(searchParams, () -> fetchHostsForTable(searchParams));
+
+        try {
+            Host host = ((DataSource) dataSource).getHost();
+            return host == null ? null : host.getHostId();
+        } catch (TskCoreException ex) {
+            logger.log(Level.WARNING, "There was an error getting the host for data source with id: " + dataSource.getId(), ex);
+            return null;
+        }
+    }
+
+    /**
+     * In instances where parents are hidden, refresh the entire tree.
+     *
+     * @param parentContent The parent content.
+     *
+     * @return True if full tree should be refreshed.
+     */
+    private boolean invalidatesAllFileSystem(Content parentContent) {
+        if (parentContent instanceof VolumeSystem || parentContent instanceof FileSystem) {
+            return true;
+        }
+
+        if (parentContent instanceof Directory) {
+            Directory dir = (Directory) parentContent;
+            return dir.isRoot() && !dir.getName().equals(".") && !dir.getName().equals("..");
+        }
+
+        if (parentContent instanceof LocalDirectory) {
+            return ((LocalDirectory) parentContent).isRoot();
+        }
+
+        return false;
+    }
+
+    @Override
+    Set<? extends DAOEvent> handleIngestComplete() {
+        // GVDTODO
+        return Collections.emptySet();
+    }
+
+    @Override
+    Set<TreeEvent> shouldRefreshTree() {
+        // GVDTODO
+        return Collections.emptySet();
+    }
+
+    @Override
+    Set<? extends DAOEvent> processEvent(PropertyChangeEvent evt) {
+        // GVDTODO these can probably be rewritten now that it isn't handling a collection of autopsy events
+        Set<Long> affectedPersons = new HashSet<>();
+        Set<Long> affectedHosts = new HashSet<>();
+        Set<Long> affectedParentContent = new HashSet<>();
+        boolean refreshAllContent = false;
+
+        Content content = DAOEventUtils.getDerivedFileContentFromFileEvent(evt);
+        if (content != null) {
+            Content parentContent;
+            try {
+                parentContent = content.getParent();
+            } catch (TskCoreException ex) {
+                logger.log(Level.WARNING, "Unable to get parent content of content with id: " + content.getId(), ex);
+                return Collections.emptySet();
+            }
+
+            if (parentContent == null) {
+                return Collections.emptySet();
+            }
+
+            if (invalidatesAllFileSystem(parentContent)) {
+                refreshAllContent = true;
+            } else {
+                affectedParentContent.add(parentContent.getId());
+            }
+        } else if (evt instanceof DataSourceAddedEvent) {
+            Long hostId = getHostFromDs(((DataSourceAddedEvent) evt).getDataSource());
+            if (hostId != null) {
+                affectedHosts.add(hostId);
+            }
+        } else if (evt instanceof DataSourceNameChangedEvent) {
+            Long hostId = getHostFromDs(((DataSourceNameChangedEvent) evt).getDataSource());
+            if (hostId != null) {
+                affectedHosts.add(hostId);
+            }
+        } else if (evt instanceof HostsAddedEvent) {
+            // GVDTODO how best to handle host added?
+        } else if (evt instanceof HostsUpdatedEvent) {
+            // GVDTODO how best to handle host updated?
+        } else if (evt instanceof HostsAddedToPersonEvent) {
+            Person person = ((HostsAddedToPersonEvent) evt).getPerson();
+            affectedPersons.add(person == null ? null : person.getPersonId());
+        } else if (evt instanceof HostsRemovedFromPersonEvent) {
+            Person person = ((HostsRemovedFromPersonEvent) evt).getPerson();
+            affectedPersons.add(person == null ? null : person.getPersonId());
+        }
+
+        final boolean triggerFullRefresh = refreshAllContent;
+
+        // GVDTODO handling null ids versus the 'No Persons' option
+        ConcurrentMap<SearchParams<?>, BaseSearchResultsDTO> concurrentMap = this.searchParamsCache.asMap();
+        concurrentMap.forEach((k, v) -> {
+            Object searchParams = k.getParamData();
+            if (searchParams instanceof FileSystemPersonSearchParam) {
+                FileSystemPersonSearchParam personParam = (FileSystemPersonSearchParam) searchParams;
+                if (affectedPersons.contains(personParam.getPersonObjectId())) {
+                    concurrentMap.remove(k);
+                }
+            } else if (searchParams instanceof FileSystemHostSearchParam) {
+                FileSystemHostSearchParam hostParams = (FileSystemHostSearchParam) searchParams;
+                if (affectedHosts.contains(hostParams.getHostObjectId())) {
+                    concurrentMap.remove(k);
+                }
+            } else if (searchParams instanceof FileSystemContentSearchParam) {
+                FileSystemContentSearchParam contentParams = (FileSystemContentSearchParam) searchParams;
+                if (triggerFullRefresh
+                        || contentParams.getContentObjectId() == null
+                        || affectedParentContent.contains(contentParams.getContentObjectId())) {
+                    concurrentMap.remove(k);
+                }
+            }
+        });
+
+        Stream fileEvts = triggerFullRefresh
+                ? Stream.of(new FileSystemContentEvent(null))
+                : affectedParentContent.stream().map(id -> new FileSystemContentEvent(id));
+
+        return Stream.of(
+                affectedPersons.stream().map(id -> new FileSystemPersonEvent(id)),
+                affectedHosts.stream().map(id -> new FileSystemHostEvent(id)),
+                fileEvts
+        )
+                .flatMap(s -> s)
+                .collect(Collectors.toSet());
+    }
+
+    /**
+     * Get all data sources belonging to a given host.
+     *
+     * @param host The host.
+     *
+     * @return Results containing all data sources for the given host.
+     *
+     * @throws ExecutionException
+     */
+    public TreeResultsDTO<FileSystemContentSearchParam> getDataSourcesForHost(Host host) throws ExecutionException {
+        try {
+            List<TreeResultsDTO.TreeItemDTO<FileSystemContentSearchParam>> treeItemRows = new ArrayList<>();
+            for (DataSource ds : Case.getCurrentCaseThrows().getSleuthkitCase().getHostManager().getDataSourcesForHost(host)) {
+                treeItemRows.add(new TreeResultsDTO.TreeItemDTO<>(
+                        ds.getClass().getSimpleName(),
+                        new FileSystemContentSearchParam(ds.getId()),
+                        ds,
+                        ds.getName(),
+                        null
+                ));
+            }
+            return new TreeResultsDTO<>(treeItemRows);
+        } catch (NoCurrentCaseException | TskCoreException ex) {
+            throw new ExecutionException("An error occurred while fetching images for host with ID " + host.getHostId(), ex);
+        }
+    }
+
+    /**
+     * Create results for a single given data source ID (not its children).
+     *
+     * @param dataSourceObjId The data source object ID.
+     *
+     * @return Results containing just this data source.
+     *
+     * @throws ExecutionException
+     */
+    public TreeResultsDTO<FileSystemContentSearchParam> getSingleDataSource(long dataSourceObjId) throws ExecutionException {
+        try {
+            List<TreeResultsDTO.TreeItemDTO<FileSystemContentSearchParam>> treeItemRows = new ArrayList<>();
+            DataSource ds = Case.getCurrentCaseThrows().getSleuthkitCase().getDataSource(dataSourceObjId);
+            treeItemRows.add(new TreeResultsDTO.TreeItemDTO<>(
+                    ds.getClass().getSimpleName(),
+                    new FileSystemContentSearchParam(ds.getId()),
+                    ds,
+                    ds.getName(),
+                    null
+            ));
+
+            return new TreeResultsDTO<>(treeItemRows);
+        } catch (NoCurrentCaseException | TskCoreException | TskDataException ex) {
+            throw new ExecutionException("An error occurred while fetching data source with ID " + dataSourceObjId, ex);
+        }
+    }
+
+    /**
+     * Get the children that will be displayed in the tree for a given content ID.
+     *
+     * @param contentId Object ID of parent content.
+     *
+     * @return The results.
+     *
+     * @throws ExecutionException
+     */
+    public TreeResultsDTO<FileSystemContentSearchParam> getDisplayableContentChildren(Long contentId) throws ExecutionException {
+        try {
+
+            List<Content> treeChildren = FileSystemColumnUtils.getVisibleTreeNodeChildren(contentId);
+
+            List<TreeResultsDTO.TreeItemDTO<FileSystemContentSearchParam>> treeItemRows = new ArrayList<>();
+            for (Content child : treeChildren) {
+                Long countForNode = null;
+                if ((child instanceof AbstractFile)
+                        && ! (child instanceof LocalFilesDataSource)) {
+                    countForNode = getContentForTable(new FileSystemContentSearchParam(child.getId()), 0, null).getTotalResultsCount();
+                }
+                treeItemRows.add(new TreeResultsDTO.TreeItemDTO<>(
+                        child.getClass().getSimpleName(),
+                        new FileSystemContentSearchParam(child.getId()),
+                        child,
+                        getNameForContent(child),
+                        countForNode == null ? TreeDisplayCount.NOT_SHOWN : TreeDisplayCount.getDeterminate(countForNode)
+                ));
+            }
+            return new TreeResultsDTO<>(treeItemRows);
+
+        } catch (NoCurrentCaseException | TskCoreException ex) {
+            throw new ExecutionException("An error occurred while fetching displayable children for content with ID " + contentId, ex);
+        }
+    }
+
+    /**
+     * Get display name for the given content.
+     *
+     * @param content The content.
+     *
+     * @return Display name for the content.
+     */
+    private String getNameForContent(Content content) {
+        if (content instanceof Volume) {
+            return FileSystemColumnUtils.getVolumeDisplayName((Volume)content);
+        } else if (content instanceof AbstractFile) {
+            return FileSystemColumnUtils.convertDotDirName((AbstractFile) content);
+        }
+        return content.getName();
     }
 
     /**
@@ -304,19 +578,18 @@ public class FileSystemDAO {
             super(params);
         }
 
-        @Override
-        public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException {
-            return MainDAO.getInstance().getFileSystemDAO().getContentForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh);
+        protected FileSystemDAO getDAO() {
+            return MainDAO.getInstance().getFileSystemDAO();
        }
 
         @Override
-        public boolean isRefreshRequired(PropertyChangeEvent evt) {
-            Content content = getContentFromEvt(evt);
-            if (content == null) {
-                return false;
-            }
+        public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException {
+            return getDAO().getContentForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize);
+        }
 
-            return MainDAO.getInstance().getFileSystemDAO().isSystemContentInvalidating(getParameters(), content);
+        @Override
+        public boolean isRefreshRequired(DAOEvent evt) {
+            return getDAO().isSystemContentInvalidating(this.getParameters(), evt);
         }
     }
 
@@ -331,16 +604,18 @@ public class FileSystemDAO {
             super(params);
         }
 
-        @Override
-        public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException {
-            return MainDAO.getInstance().getFileSystemDAO().getContentForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh);
+        protected FileSystemDAO getDAO() {
+            return MainDAO.getInstance().getFileSystemDAO();
         }
 
         @Override
-        public boolean isRefreshRequired(PropertyChangeEvent evt) {
-            // TODO implement the method for determining if
-            // a refresh is needed.
-            return false;
+        public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException {
+            return getDAO().getContentForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize);
+        }
+
+        @Override
+        public boolean isRefreshRequired(DAOEvent evt) {
+            return getDAO().isSystemHostInvalidating(this.getParameters(), evt);
         }
     }
 }
diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeSizeSearchParams.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeSizeSearchParams.java
index c9bc49c5e6..1a1eb91917 100755
--- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeSizeSearchParams.java
+++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeSizeSearchParams.java
@@ -25,51 +25,6 @@ import java.util.Objects;
  */
 public class FileTypeSizeSearchParams {
 
-    public enum FileSizeFilter {
-        SIZE_50_200(0, "SIZE_50_200", "50 - 200MB", 50_000_000L, 200_000_000L), //NON-NLS
-        SIZE_200_1000(1, "SIZE_200_1GB", "200MB - 1GB", 200_000_000L, 1_000_000_000L), //NON-NLS
-        SIZE_1000_(2, "SIZE_1000+", "1GB+", 1_000_000_000L, null); //NON-NLS
-        private final int id;
-        private final String name;
-        private final String displayName;
-        private long minBound;
-        private Long maxBound;
-
-        private FileSizeFilter(int id, String name, String displayName, long minBound, Long maxBound) {
-            this.id = id;
-            this.name = name;
-            this.displayName = displayName;
-            this.minBound = minBound;
-            this.maxBound = maxBound;
-        }
-
-        public String getName() {
-            return this.name;
-        }
-
-        public int getId() {
-            return this.id;
-        }
-
-        public String getDisplayName() {
-            return this.displayName;
-        }
-
-        /**
-         * @return The minimum inclusive bound (non-null).
-         */
-        public long getMinBound() {
-            return minBound;
-        }
-
-        /**
-         * @return The maximum exclusive bound (if null, no upper limit).
-         */
-        public Long getMaxBound() {
-            return maxBound;
-        }
-
-    }
     private final FileSizeFilter sizeFilter;
 
     private final Long dataSourceId;
diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java
index 56a65af4ec..44de97b935 100644
--- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java
+++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java
@@ -18,22 +18,149 @@
  */
 package org.sleuthkit.autopsy.mainui.datamodel;
 
+import org.sleuthkit.autopsy.mainui.datamodel.events.DAOAggregateEvent;
+import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent;
+import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventBatcher;
+import com.google.common.collect.ImmutableList;
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import java.beans.PropertyChangeEvent;
+import java.beans.PropertyChangeListener;
+import java.beans.PropertyChangeSupport;
+import java.util.Collection;
+import java.util.EnumSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ScheduledThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+import java.util.logging.Level;
+import java.util.prefs.PreferenceChangeListener;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import org.apache.commons.collections4.CollectionUtils;
+import org.python.google.common.collect.ImmutableSet;
+import org.sleuthkit.autopsy.casemodule.Case;
+import org.sleuthkit.autopsy.core.UserPreferences;
+import org.sleuthkit.autopsy.coreutils.Logger;
+import org.sleuthkit.autopsy.ingest.IngestManager;
+import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent;
+
 /**
  * Main entry point for DAO for providing data to populate the data results
  * viewer.
  */
-public class MainDAO {
+public class MainDAO extends AbstractDAO {
+
+    private static final Logger logger = Logger.getLogger(MainDAO.class.getName());
+
+    private static final Set<IngestManager.IngestJobEvent> INGEST_JOB_EVENTS = EnumSet.of(
+            IngestManager.IngestJobEvent.COMPLETED,
+            IngestManager.IngestJobEvent.CANCELLED
+    );
+
+    private static final Set<IngestManager.IngestModuleEvent> INGEST_MODULE_EVENTS = EnumSet.of(
+            IngestManager.IngestModuleEvent.CONTENT_CHANGED,
+            IngestManager.IngestModuleEvent.DATA_ADDED,
+            IngestManager.IngestModuleEvent.FILE_DONE
+    );
+
+    private static final Set<String> QUEUED_CASE_EVENTS = ImmutableSet.of(
+            Case.Events.OS_ACCOUNTS_ADDED.toString(),
+            Case.Events.OS_ACCOUNTS_UPDATED.toString(),
+            Case.Events.OS_ACCOUNTS_DELETED.toString(),
+            Case.Events.OS_ACCT_INSTANCES_ADDED.toString()
+    );
+
+    private static final long WATCH_RESOLUTION_MILLIS = 30 * 1000;
+
+    private static final long RESULT_BATCH_MILLIS = 5 * 1000;
 
     private static MainDAO instance = null;
 
     public synchronized static MainDAO getInstance() {
         if (instance == null) {
             instance = new MainDAO();
+            instance.init();
         }
 
         return instance;
     }
 
+    /**
+     * The case event listener.
+     */
+    private final PropertyChangeListener caseEventListener = (evt) -> {
+        try {
+            if (evt.getPropertyName().equals(Case.Events.CURRENT_CASE.toString())) {
+                this.clearCaches();
+            } else if (QUEUED_CASE_EVENTS.contains(evt.getPropertyName())) {
+                handleEvent(evt, false);
+            } else {
+                // handle case events immediately
+                handleEvent(evt, true);
+            }
+        } catch (Throwable ex) {
+            // firewall exception
+            logger.log(Level.WARNING, "An exception occurred while handling case events", ex);
+        }
+    };
+
+    /**
+     * The user preference listener.
+     */
+    private final PreferenceChangeListener userPreferenceListener = (evt) -> {
+        try {
+            this.clearCaches();
+        } catch (Throwable ex) {
+            // firewall exception
+            logger.log(Level.WARNING, "An exception occurred while handling user preference change", ex);
+        }
+    };
+
+    /**
+     * The ingest module event listener.
+     */
+    private final PropertyChangeListener ingestModuleEventListener = (evt) -> {
+        try {
+            handleEvent(evt, false);
+        } catch (Throwable ex) {
+            // firewall exception
+            logger.log(Level.WARNING, "An exception occurred while handling ingest module event", ex);
+        }
+    };
+
+    /**
+     * The ingest job event listener.
+     */
+    private final PropertyChangeListener ingestJobEventListener = (evt) -> {
+        try {
+            handleEventFlush();
+        } catch (Throwable ex) {
+            // firewall exception
+            logger.log(Level.WARNING, "An exception occurred while handling ingest job event", ex);
+        }
+    };
+
+    private final ScheduledThreadPoolExecutor timeoutExecutor
+            = new ScheduledThreadPoolExecutor(1,
+                    new ThreadFactoryBuilder().setNameFormat(MainDAO.class.getName()).build());
+
+    private final PropertyChangeManager resultEventsManager = new PropertyChangeManager();
+    private final PropertyChangeManager treeEventsManager = new PropertyChangeManager();
+
+    private final DAOEventBatcher<DAOEvent> eventBatcher = new DAOEventBatcher<>(
+            (evts) -> {
+                try {
+                    fireResultEvts(evts);
+                } catch (Throwable ex) {
+                    // firewall exception
+                    logger.log(Level.WARNING, "An exception occurred while handling batched dao events", ex);
+                }
+            },
+            RESULT_BATCH_MILLIS);
+
     private final DataArtifactDAO dataArtifactDAO = DataArtifactDAO.getInstance();
     private final AnalysisResultDAO analysisResultDAO = AnalysisResultDAO.getInstance();
     private final ViewsDAO viewsDAO = ViewsDAO.getInstance();
@@ -42,10 +169,54 @@ public class MainDAO {
     private final OsAccountsDAO osAccountsDAO = OsAccountsDAO.getInstance();
     private final CommAccountsDAO commAccountsDAO = CommAccountsDAO.getInstance();
 
+    // NOTE: whenever adding a new sub-dao, it should be added to this list for event updates.
+    private final List<AbstractDAO> allDAOs = ImmutableList.of(
+            dataArtifactDAO,
+            analysisResultDAO,
+            viewsDAO,
+            fileSystemDAO,
+            tagsDAO,
+            osAccountsDAO,
+            commAccountsDAO);
+
+    /**
+     * Registers listeners with autopsy event publishers and starts internal
+     * threads.
+     */
+    void init() {
+        IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS, ingestModuleEventListener);
+        IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS, ingestJobEventListener);
+        Case.addPropertyChangeListener(caseEventListener);
+        UserPreferences.addChangeListener(userPreferenceListener);
+
+        this.timeoutExecutor.scheduleAtFixedRate(
+                () -> {
+                    try {
+                        handleTreeEventTimeouts();
+                    } catch (Throwable ex) {
+                        // firewall exception
+                        logger.log(Level.WARNING, "An exception occurred while handling tree event timeouts", ex);
+                    }
+                },
+                WATCH_RESOLUTION_MILLIS,
+                WATCH_RESOLUTION_MILLIS,
+                TimeUnit.MILLISECONDS);
+    }
+
+    /**
+     * Unregisters listeners from autopsy event publishers.
+     */
+    void unregister() {
+        IngestManager.getInstance().removeIngestModuleEventListener(INGEST_MODULE_EVENTS, ingestModuleEventListener);
+        IngestManager.getInstance().removeIngestJobEventListener(INGEST_JOB_EVENTS, ingestJobEventListener);
+        Case.removePropertyChangeListener(caseEventListener);
+        UserPreferences.removeChangeListener(userPreferenceListener);
+    }
+
     public DataArtifactDAO getDataArtifactsDAO() {
         return dataArtifactDAO;
     }
-    
+
     public AnalysisResultDAO getAnalysisResultDAO() {
         return analysisResultDAO;
     }
@@ -53,20 +224,145 @@ public class MainDAO {
     public ViewsDAO getViewsDAO() {
         return viewsDAO;
     }
-    
+
     public FileSystemDAO getFileSystemDAO() {
         return fileSystemDAO;
     }
-    
+
     public TagsDAO getTagsDAO() {
         return tagsDAO;
     }
-    
+
     public OsAccountsDAO getOsAccountsDAO() {
         return osAccountsDAO;
     }
-    
+
     public CommAccountsDAO getCommAccountsDAO() {
         return commAccountsDAO;
     }
+
+    public PropertyChangeManager getResultEventsManager() {
+        return this.resultEventsManager;
+    }
+
+    public PropertyChangeManager getTreeEventsManager() {
+        return treeEventsManager;
+    }
+
+    @Override
+    void clearCaches() {
+        allDAOs.forEach((subDAO) -> subDAO.clearCaches());
+    }
+
+    @Override
+    Set<DAOEvent> processEvent(PropertyChangeEvent evt) {
+        return allDAOs.stream()
+                .map(subDAO -> subDAO.processEvent(evt))
+                .flatMap(evts -> evts == null ? Stream.empty() : evts.stream())
+                .collect(Collectors.toSet());
+    }
+
+    @Override
+    Set<TreeEvent> shouldRefreshTree() {
+        return allDAOs.stream()
+                .map((subDAO) -> subDAO.shouldRefreshTree())
+                .flatMap(evts -> evts == null ? Stream.empty() : evts.stream())
+                .collect(Collectors.toSet());
+    }
+
+    @Override
+    Set<DAOEvent> handleIngestComplete() {
+        List<Set<? extends DAOEvent>> daoStreamEvts = allDAOs.stream()
+                .map((subDAO) -> subDAO.handleIngestComplete())
+                .collect(Collectors.toList());
+
+        daoStreamEvts.add(eventBatcher.flushEvents());
+
+        return daoStreamEvts.stream()
+                .flatMap(evts -> evts == null ? Stream.empty() : evts.stream())
+                .collect(Collectors.toSet());
+    }
+
+    /**
+     * Processes and handles an autopsy event.
+     *
+     * @param evt                   The event.
+     * @param immediateResultAction If true, result events are immediately
+     *                              fired. Otherwise, the result events are
+     *                              batched.
+     */
+    private void handleEvent(PropertyChangeEvent evt, boolean immediateResultAction) {
+        Collection<DAOEvent> daoEvts = processEvent(evt);
+
+        Map<DAOEvent.Type, Set<DAOEvent>> daoEvtsByType = daoEvts.stream()
+                .collect(Collectors.groupingBy(e -> e.getType(), Collectors.toSet()));
+
+        fireTreeEvts(daoEvtsByType.get(DAOEvent.Type.TREE));
+
+        Set<DAOEvent> resultEvts = daoEvtsByType.get(DAOEvent.Type.RESULT);
+        if (immediateResultAction) {
+            fireResultEvts(resultEvts);
+        } else {
+            eventBatcher.enqueueAllEvents(resultEvts);
+        }
+    }
+
+    private void handleEventFlush() {
+        Collection<DAOEvent> daoEvts = handleIngestComplete();
+
+        Map<DAOEvent.Type, Set<DAOEvent>> daoEvtsByType = daoEvts.stream()
+                .collect(Collectors.groupingBy(e -> e.getType(), Collectors.toSet()));
+
+        fireTreeEvts(daoEvtsByType.get(DAOEvent.Type.TREE));
+
+        Set<DAOEvent> resultEvts = daoEvtsByType.get(DAOEvent.Type.RESULT);
+        fireResultEvts(resultEvts);
+    }
+
+    private void fireResultEvts(Set<DAOEvent> resultEvts) {
+        if (CollectionUtils.isNotEmpty(resultEvts)) {
+            resultEventsManager.firePropertyChange("DATA_CHANGE", null, new DAOAggregateEvent(resultEvts));
+        }
+    }
+
+    private void fireTreeEvts(Set<? extends DAOEvent> treeEvts) {
+        if (CollectionUtils.isNotEmpty(treeEvts)) {
+            treeEventsManager.firePropertyChange("TREE_CHANGE", null, new DAOAggregateEvent(treeEvts));
+        }
+    }
+
+    private void handleTreeEventTimeouts() {
+        fireTreeEvts(this.shouldRefreshTree());
+    }
+
+    @Override
+    protected void finalize() throws Throwable {
+        unregister();
+    }
+
+    /**
+     * A wrapper around property change support that exposes
+     * addPropertyChangeListener and removePropertyChangeListener so that
+     * netbeans weak listeners can automatically unregister.
+     */
+    public static class PropertyChangeManager {
+
+        private final PropertyChangeSupport support = new PropertyChangeSupport(this);
+
+        public void addPropertyChangeListener(PropertyChangeListener listener) {
+            support.addPropertyChangeListener(listener);
+        }
+
+        public void removePropertyChangeListener(PropertyChangeListener listener) {
+            support.removePropertyChangeListener(listener);
+        }
+
+        PropertyChangeListener[] getPropertyChangeListeners() {
+            return support.getPropertyChangeListeners();
+        }
+
+        void firePropertyChange(String propertyName, Object oldValue, Object newValue) {
+            support.firePropertyChange(propertyName, oldValue, newValue);
+        }
+    }
 }
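
DAOEventBatcher is referenced but not defined in this diff; the behavior MainDAO relies on — queue result events, then hand the accumulated batch to a consumer roughly every RESULT_BATCH_MILLIS — can be sketched with a scheduled executor (a simplified, illustrative stand-in, not the real class):

    import java.util.HashSet;
    import java.util.Set;
    import java.util.concurrent.Executors;
    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.TimeUnit;
    import java.util.function.Consumer;

    // Simplified stand-in for the event batcher used by MainDAO above.
    public class BatcherSketch<E> {

        private final Set<E> pending = new HashSet<>();
        private final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
        private final Consumer<Set<E>> flushHandler;

        public BatcherSketch(Consumer<Set<E>> flushHandler, long batchMillis) {
            this.flushHandler = flushHandler;
            // Hand off whatever has accumulated, on a fixed cadence.
            scheduler.scheduleAtFixedRate(this::flush, batchMillis, batchMillis, TimeUnit.MILLISECONDS);
        }

        public synchronized void enqueue(E event) {
            pending.add(event);
        }

        private void flush() {
            Set<E> batch;
            synchronized (this) {
                if (pending.isEmpty()) {
                    return;
                }
                batch = new HashSet<>(pending);
                pending.clear();
            }
            flushHandler.accept(batch);
        }
    }
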
 */ +package org.sleuthkit.autopsy.mainui.datamodel; + +import org.apache.commons.lang3.StringUtils; +import org.sleuthkit.autopsy.datamodel.FileTypeExtensions; + +/** + * Utility methods for mapping a file name extension to a broad media type + * category and for choosing the icon used to represent that category. + */ +public class MediaTypeUtils { + + public enum ExtensionMediaType { + IMAGE, VIDEO, AUDIO, DOC, EXECUTABLE, TEXT, WEB, PDF, ARCHIVE, UNCATEGORIZED + } + + public static ExtensionMediaType getExtensionMediaType(String ext) { + if (StringUtils.isBlank(ext)) { + return ExtensionMediaType.UNCATEGORIZED; + } else { + ext = "." + ext; + } + if (FileTypeExtensions.getImageExtensions().contains(ext)) { + return ExtensionMediaType.IMAGE; + } else if (FileTypeExtensions.getVideoExtensions().contains(ext)) { + return ExtensionMediaType.VIDEO; + } else if (FileTypeExtensions.getAudioExtensions().contains(ext)) { + return ExtensionMediaType.AUDIO; + } else if (FileTypeExtensions.getDocumentExtensions().contains(ext)) { + return ExtensionMediaType.DOC; + } else if (FileTypeExtensions.getExecutableExtensions().contains(ext)) { + return ExtensionMediaType.EXECUTABLE; + } else if (FileTypeExtensions.getTextExtensions().contains(ext)) { + return ExtensionMediaType.TEXT; + } else if (FileTypeExtensions.getWebExtensions().contains(ext)) { + return ExtensionMediaType.WEB; + } else if (FileTypeExtensions.getPDFExtensions().contains(ext)) { + return ExtensionMediaType.PDF; + } else if (FileTypeExtensions.getArchiveExtensions().contains(ext)) { + return ExtensionMediaType.ARCHIVE; + } else { + return ExtensionMediaType.UNCATEGORIZED; + } + } + + /** + * Gets the path to the icon file that should be used to visually represent + * a file, using its extension media type to select the icon. + * + * @param fileType The extension media type of the file. + * + * @return An icon file path. + */ + public static String getIconForFileType(ExtensionMediaType fileType) { + if (fileType == null) { + return "org/sleuthkit/autopsy/images/file-icon.png"; + } + + switch (fileType) { + case IMAGE: + return "org/sleuthkit/autopsy/images/image-file.png"; + case VIDEO: + return "org/sleuthkit/autopsy/images/video-file.png"; + case AUDIO: + return "org/sleuthkit/autopsy/images/audio-file.png"; + case DOC: + return "org/sleuthkit/autopsy/images/doc-file.png"; + case EXECUTABLE: + return "org/sleuthkit/autopsy/images/exe-file.png"; + case TEXT: + return "org/sleuthkit/autopsy/images/text-file.png"; + case WEB: + return "org/sleuthkit/autopsy/images/web-file.png"; + case PDF: + return "org/sleuthkit/autopsy/images/pdf-file.png"; + case ARCHIVE: + return "org/sleuthkit/autopsy/images/archive-file.png"; + default: + case UNCATEGORIZED: + return "org/sleuthkit/autopsy/images/file-icon.png"; + } + } + + private MediaTypeUtils() { + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java index 98b32b6deb..78020b4dbf 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java @@ -18,24 +18,31 @@ */ package org.sleuthkit.autopsy.mainui.datamodel; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import java.beans.PropertyChangeEvent; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Optional; +import java.util.Set; import java.util.concurrent.ExecutionException; import 
java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.Stream; import org.openide.util.NbBundle; import org.openide.util.NbBundle.Messages; +import org.python.google.common.collect.ImmutableSet; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.TimeZoneUtils; +import org.sleuthkit.autopsy.mainui.datamodel.events.OsAccountEvent; import org.sleuthkit.autopsy.mainui.datamodel.ContentRowDTO.OsAccountRowDTO; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.OsAccount; import org.sleuthkit.datamodel.TskCoreException; @@ -55,7 +62,7 @@ import org.sleuthkit.datamodel.TskCoreException; "OsAccountsDAO.createSheet.comment.displayName=C", "OsAccountsDAO.createSheet.count.displayName=O", "OsAccountsDAO.fileColumns.noDescription=No Description",}) -public class OsAccountsDAO { +public class OsAccountsDAO extends AbstractDAO { private static final int CACHE_SIZE = 5; // rule of thumb: 5 entries times number of cached SearchParams sub-types private static final long CACHE_DURATION = 2; @@ -75,6 +82,13 @@ public class OsAccountsDAO { getFileColumnKey(Bundle.OsAccountsDAO_accountRealmNameProperty_displayName()), getFileColumnKey(Bundle.OsAccountsDAO_createdTimeProperty_displayName())); + private static final Set OS_EVENTS = ImmutableSet.of( + Case.Events.OS_ACCOUNTS_ADDED.toString(), + Case.Events.OS_ACCOUNTS_DELETED.toString(), + Case.Events.OS_ACCOUNTS_UPDATED.toString(), + Case.Events.OS_ACCT_INSTANCES_ADDED.toString() + ); + private static OsAccountsDAO instance = null; synchronized static OsAccountsDAO getInstance() { @@ -89,7 +103,7 @@ public class OsAccountsDAO { return new ColumnKey(name, name, Bundle.OsAccountsDAO_fileColumns_noDescription()); } - public SearchResultsDTO getAccounts(OsAccountsSearchParams key, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { + public SearchResultsDTO getAccounts(OsAccountsSearchParams key, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException { if (key == null) { throw new IllegalArgumentException("Search parameters are null"); } else if (key.getDataSourceId() != null && key.getDataSourceId() <= 0) { @@ -97,13 +111,13 @@ public class OsAccountsDAO { } SearchParams searchParams = new SearchParams<>(key, startItem, maxCount); - if (hardRefresh) { - this.searchParamsCache.invalidate(searchParams); - } - return searchParamsCache.get(searchParams, () -> fetchAccountsDTOs(searchParams)); } + private boolean isOSAccountInvalidatingEvt(OsAccountsSearchParams searchParams, DAOEvent evt) { + return evt instanceof OsAccountEvent; + } + /** * Returns a list of paged OS Accounts results. 
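* <p> * A hedged paging sketch (illustrative, mirroring the fetcher below): for page index {@code pageIdx} and page size {@code pageSize}, a caller requests {@code getAccounts(params, pageIdx * pageSize, (long) pageSize)}.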
* @@ -163,7 +177,35 @@ public class OsAccountsDAO { cellValues)); }; - return new BaseSearchResultsDTO(OS_ACCOUNTS_TYPE_ID, Bundle.OsAccounts_name_text(), OS_ACCOUNTS_WITH_SCO_COLUMNS, fileRows, 0, allAccounts.size()); + return new BaseSearchResultsDTO(OS_ACCOUNTS_TYPE_ID, Bundle.OsAccounts_name_text(), OS_ACCOUNTS_WITH_SCO_COLUMNS, fileRows, OS_ACCOUNTS_TYPE_ID, 0, allAccounts.size()); + } + + @Override + void clearCaches() { + this.searchParamsCache.invalidateAll(); + } + + @Override + Set handleIngestComplete() { + // GVDTODO + return Collections.emptySet(); + } + + @Override + Set shouldRefreshTree() { + // GVDTODO + return Collections.emptySet(); + } + + @Override + Set processEvent(PropertyChangeEvent evt) { + if (!OS_EVENTS.contains(evt.getPropertyName())) { + return Collections.emptySet(); + } + + this.searchParamsCache.invalidateAll(); + + return Collections.singleton(new OsAccountEvent()); } /** @@ -180,19 +222,18 @@ public class OsAccountsDAO { super(params); } - @Override - public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return MainDAO.getInstance().getOsAccountsDAO().getAccounts(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + protected OsAccountsDAO getDAO() { + return MainDAO.getInstance().getOsAccountsDAO(); } @Override - public boolean isRefreshRequired(PropertyChangeEvent evt) { - String eventType = evt.getPropertyName(); - if (eventType.equals(Case.Events.OS_ACCOUNTS_ADDED.toString()) - || eventType.equals(Case.Events.OS_ACCOUNTS_DELETED.toString())) { - return true; - } - return false; + public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { + return getDAO().getAccounts(this.getParameters(), pageIdx * pageSize, (long) pageSize); + } + + @Override + public boolean isRefreshRequired(DAOEvent evt) { + return getDAO().isOSAccountInvalidatingEvt(this.getParameters(), evt); } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/SearchResultsDTO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/SearchResultsDTO.java index 8f9e4b9caa..8346c2e1b7 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/SearchResultsDTO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/SearchResultsDTO.java @@ -21,20 +21,28 @@ package org.sleuthkit.autopsy.mainui.datamodel; import java.util.List; /** - * - * @author gregd + * Interface for all search results used to populate the table/DataResultViewer area. */ public interface SearchResultsDTO { + // Returns the type of data. String getTypeId(); + + // Returns a unique signature for the type of data. Used to keep track of custom column ordering. + String getSignature(); + // Text to display at top of the table about the type of the results. String getDisplayName(); + // Sorted list of column headers. 
The RowDTO column values will be in the same order. List getColumns(); + // Page-sized, sorted list of rows to display. List getItems(); + // Total number of results (may be larger than the number of rows in this page). long getTotalResultsCount(); + // Index in the total results at which this set/page starts. long getStartItem(); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/SubDAOUtils.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/SubDAOUtils.java new file mode 100644 index 0000000000..53e2f7cfb3 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/SubDAOUtils.java @@ -0,0 +1,110 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel; + +import com.google.common.cache.Cache; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentMap; +import java.util.function.Function; +import java.util.stream.Collectors; +import org.apache.commons.lang3.tuple.Pair; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeCounts; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; + +/** + * Utilities for common actions in the sub DAOs. + */ +public class SubDAOUtils { + + /** + * Using a digest of event information, clears keys in a cache that may be + * affected by events. + * + * @param cache The cache. + * @param getKeys Using a key from a cache, provides a tuple + * of the relevant key in the data source + * mapping and the data source id (or null if + * no data source filtering). + * @param itemDataSourceMapping The event digest. + */ + static void invalidateKeys(Cache, ?> cache, Function> getKeys, Map> itemDataSourceMapping) { + invalidateKeys(cache, getKeys, Collections.singletonList(itemDataSourceMapping)); + } + + /** + * Using a digest of event information, clears keys in a cache that may be + * affected by events. + * + * @param cache The cache. + * @param getKeys Using a key from a cache, provides a tuple + * of the relevant key in the data source + * mapping and the data source id (or null if + * no data source filtering). + * @param itemDataSourceMapping The list of event digests. + */ + static void invalidateKeys(Cache, ?> cache, Function> getKeys, List>> itemDataSourceMapping) { + ConcurrentMap, ?> concurrentMap = cache.asMap(); + concurrentMap.forEach((k, v) -> { + Pair pairItems = getKeys.apply(k.getParamData()); + T searchParamsKey = pairItems.getLeft(); + Long searchParamsDsId = pairItems.getRight(); + for (Map> itemDsMapping : itemDataSourceMapping) { + Set dsIds = itemDsMapping.get(searchParamsKey); + if (dsIds != null && (searchParamsDsId == null || dsIds.contains(searchParamsDsId))) { + concurrentMap.remove(k); + } + } + }); + } + + /** + * Returns a set of tree events gathered from the TreeCounts instance after + * calling flushEvents. 
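+ * <p> + * A minimal sketch of how a sub DAO might call this (the {@code treeCounts} field and {@code createTreeItem} converter are illustrative, not part of this file): + * <pre>{@code + * Set<TreeEvent> treeEvts = SubDAOUtils.getIngestCompleteEvents(this.treeCounts, daoEvt -> createTreeItem(daoEvt)); + * }</pre>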
+ * + * @param treeCounts The tree counts instance. + * @param converter The means of acquiring a tree item dto to be placed in + * the TreeEvent. + * + * @return The generated tree events. + */ + static Set getIngestCompleteEvents(TreeCounts treeCounts, Function> converter) { + return treeCounts.flushEvents().stream() + .map(daoEvt -> new TreeEvent(converter.apply(daoEvt), true)) + .collect(Collectors.toSet()); + } + + /** + * Returns a set of tree events gathered from the TreeCounts instance after + * calling getEventTimeouts. + * + * @param treeCounts The tree counts instance. + * @param converter The means of acquiring a tree item dto to be placed in + * the TreeEvent. + * + * @return The generated tree events. + */ + static Set getRefreshEvents(TreeCounts treeCounts, Function> converter) { + return treeCounts.getEventTimeouts().stream() + .map(daoEvt -> new TreeEvent(converter.apply(daoEvt), true)) + .collect(Collectors.toSet()); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java index bf5e844e72..37c23ef375 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java @@ -18,18 +18,29 @@ */ package org.sleuthkit.autopsy.mainui.datamodel; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import java.beans.PropertyChangeEvent; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; import java.util.Comparator; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.Stream; import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.tuple.Pair; +import org.apache.commons.lang3.tuple.Triple; import org.openide.util.NbBundle; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; @@ -40,7 +51,9 @@ import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent; import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent; import org.sleuthkit.autopsy.core.UserPreferences; import org.sleuthkit.autopsy.coreutils.TimeZoneUtils; -import org.sleuthkit.autopsy.events.AutopsyEvent; +import org.sleuthkit.autopsy.mainui.datamodel.TagsSearchParams.TagType; +import org.sleuthkit.autopsy.mainui.datamodel.events.TagsEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifactTag; @@ -52,7 +65,7 @@ import org.sleuthkit.datamodel.TskCoreException; /** * Provides information to populate the results viewer for data in the allTags - section. + * section. 
*/ @Messages({"TagsDAO.fileColumns.nameColLbl=Name", "TagsDAO.fileColumns.originalName=Original Name", @@ -72,15 +85,15 @@ import org.sleuthkit.datamodel.TskCoreException; "TagsDAO.tagColumns.typeColLbl=Result Type", "TagsDAO.tagColumns.commentColLbl=Comment", "TagsDAO.tagColumns.userNameColLbl=User Name"}) -public class TagsDAO { +public class TagsDAO extends AbstractDAO { private static final int CACHE_SIZE = 5; // rule of thumb: 5 entries times number of cached SearchParams sub-types private static final long CACHE_DURATION = 2; - private static final TimeUnit CACHE_DURATION_UNITS = TimeUnit.MINUTES; - private final Cache, SearchResultsDTO> searchParamsCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).expireAfterAccess(CACHE_DURATION, CACHE_DURATION_UNITS).build(); - + private static final TimeUnit CACHE_DURATION_UNITS = TimeUnit.MINUTES; + private final Cache, SearchResultsDTO> searchParamsCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).expireAfterAccess(CACHE_DURATION, CACHE_DURATION_UNITS).build(); + private static final String USER_NAME_PROPERTY = "user.name"; //NON-NLS - + private static final List FILE_TAG_COLUMNS = Arrays.asList( getFileColumnKey(Bundle.TagsDAO_fileColumns_nameColLbl()), getFileColumnKey(Bundle.TagsDAO_fileColumns_originalName()), // GVDTODO handle translation @@ -115,8 +128,8 @@ public class TagsDAO { private static ColumnKey getFileColumnKey(String name) { return new ColumnKey(name, name, Bundle.TagsDAO_fileColumns_noDescription()); } - - public SearchResultsDTO getTags(TagsSearchParams key, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { + + public SearchResultsDTO getTags(TagsSearchParams key, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException { if (key.getTagName() == null) { throw new IllegalArgumentException("Must have non-null tag name"); } else if (key.getDataSourceId() != null && key.getDataSourceId() <= 0) { @@ -124,17 +137,13 @@ public class TagsDAO { } else if (key.getTagType() == null) { throw new IllegalArgumentException("Must have non-null tag type"); } - - SearchParams searchParams = new SearchParams<>(key, startItem, maxCount); - if (hardRefresh) { - this.searchParamsCache.invalidate(searchParams); - } + SearchParams searchParams = new SearchParams<>(key, startItem, maxCount); return searchParamsCache.get(searchParams, () -> fetchTagsDTOs(searchParams)); - } + } @NbBundle.Messages({"FileTag.name.text=File Tag", - "ResultTag.name.text=Result Tag"}) + "ResultTag.name.text=Result Tag"}) private SearchResultsDTO fetchTagsDTOs(SearchParams cacheKey) throws NoCurrentCaseException, TskCoreException { switch (cacheKey.getParamData().getTagType()) { case FILE: @@ -145,7 +154,7 @@ public class TagsDAO { throw new IllegalArgumentException("Unsupported tag type"); } } - + /** * Returns a list of paged tag results. 
* @@ -170,7 +179,7 @@ public class TagsDAO { Long dataSourceId = cacheKey.getParamData().getDataSourceId(); TagName tagName = cacheKey.getParamData().getTagName(); - + // get all tag results List allTags = new ArrayList<>(); List artifactTags = (dataSourceId != null && dataSourceId > 0) @@ -186,21 +195,21 @@ public class TagsDAO { } else { allTags.addAll(artifactTags); } - + // get current page of tag results List pagedTags = getPaged(allTags, cacheKey); List fileRows = new ArrayList<>(); for (Tag tag : pagedTags) { BlackboardArtifactTag blackboardTag = (BlackboardArtifactTag) tag; - + String name = blackboardTag.getContent().getName(); // As a backup. try { name = blackboardTag.getArtifact().getShortDescription(); } catch (TskCoreException ignore) { // it's a WARNING, skip } - + String contentPath; try { contentPath = blackboardTag.getContent().getUniquePath(); @@ -221,14 +230,14 @@ public class TagsDAO { blackboardTag.getId())); } - return new BaseSearchResultsDTO(BlackboardArtifactTagsRowDTO.getTypeIdForClass(), Bundle.ResultTag_name_text(), RESULT_TAG_COLUMNS, fileRows, 0, allTags.size()); + return new BaseSearchResultsDTO(BlackboardArtifactTagsRowDTO.getTypeIdForClass(), Bundle.ResultTag_name_text(), RESULT_TAG_COLUMNS, fileRows, BlackboardArtifactTag.class.getName(), 0, allTags.size()); } - + private SearchResultsDTO fetchFileTags(SearchParams cacheKey) throws NoCurrentCaseException, TskCoreException { Long dataSourceId = cacheKey.getParamData().getDataSourceId(); TagName tagName = cacheKey.getParamData().getTagName(); - + // get all tag results List allTags = new ArrayList<>(); List contentTags = (dataSourceId != null && dataSourceId > 0) @@ -244,10 +253,10 @@ public class TagsDAO { } else { allTags.addAll(contentTags); } - + // get current page of tag results List pagedTags = getPaged(allTags, cacheKey); - + List fileRows = new ArrayList<>(); for (Tag tag : pagedTags) { ContentTag contentTag = (ContentTag) tag; @@ -274,9 +283,136 @@ public class TagsDAO { file.getId())); } - return new BaseSearchResultsDTO(ContentTagsRowDTO.getTypeIdForClass(), Bundle.FileTag_name_text(), FILE_TAG_COLUMNS, fileRows, 0, allTags.size()); + return new BaseSearchResultsDTO(ContentTagsRowDTO.getTypeIdForClass(), Bundle.FileTag_name_text(), FILE_TAG_COLUMNS, fileRows, ContentTag.class.getName(), 0, allTags.size()); } - + + /** + * Returns true if the DAO event could have an impact on the given search + * params. + * + * @param tagParams The tag params. + * @param daoEvt The DAO event. + * + * @return True if the event could affect the results of the search params. 
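+ * <p> + * For example (illustrative values): a {@code TagsEvent} with tag name id 3, tag type FILE, and data source id 42 invalidates search params that use the same tag name and type and whose data source id is either null or 42.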
+ */ + private boolean isTagsInvalidatingEvent(TagsSearchParams tagParams, DAOEvent daoEvt) { + if (!(daoEvt instanceof TagsEvent)) { + return false; + } + + TagsEvent tagEvt = (TagsEvent) daoEvt; + return (tagParams.getTagName().getId() == tagEvt.getTagNameId() + && tagParams.getTagType().equals(tagEvt.getTagType()) + && (tagParams.getDataSourceId() == null + || tagEvt.getDataSourceId() == null + || tagParams.getDataSourceId().equals(tagEvt.getDataSourceId()))); + } + + @Override + void clearCaches() { + this.searchParamsCache.invalidateAll(); + } + + @Override + Set handleIngestComplete() { + // GVDTODO + return Collections.emptySet(); + } + + @Override + Set shouldRefreshTree() { + // GVDTODO + return Collections.emptySet(); + } + + @Override + Set processEvent(PropertyChangeEvent evt) { + // GVDTODO this may be rewritten more simply now that it no longer processes a list of events + Map, Set>> mapping = new HashMap<>(); + + // tag type, tag name id, data source id (or null if unknown) + Triple data = getTagData(evt); + if (data != null) { + mapping.computeIfAbsent(Pair.of(data.getLeft(), data.getMiddle()), k -> new HashSet<>()) + .add(Optional.ofNullable(data.getRight())); + } + + // don't continue if there are no mapping entries + if (mapping.isEmpty()) { + return Collections.emptySet(); + } + + ConcurrentMap, SearchResultsDTO> concurrentMap = this.searchParamsCache.asMap(); + concurrentMap.forEach((k, v) -> { + TagsSearchParams paramData = k.getParamData(); + Set> affectedDataSources = mapping.get(Pair.of(paramData.getTagType(), paramData.getTagName().getId())); + // we only clear a key if the tag name / type line up and either the parameters data source wasn't specified, + // there is a wild card data source for the event, or the data source is contained in the list of data sources + // affected by the event + if (affectedDataSources != null + && (paramData.getDataSourceId() == null + || affectedDataSources.contains(Optional.empty()) + || affectedDataSources.contains(Optional.of(paramData.getDataSourceId())))) { + concurrentMap.remove(k); + } + }); + + return mapping.entrySet().stream() + .flatMap(entry -> { + TagType tagType = entry.getKey().getLeft(); + Long tagNameId = entry.getKey().getRight(); + + return entry.getValue().stream() + .map((dsIdOpt) -> new TagsEvent(tagType, tagNameId, dsIdOpt.orElse(null))); + }) + .collect(Collectors.toSet()); + } + + /** + * Returns tag information from an event, or null if no tag information can + * be determined. + * + * @param evt The autopsy event. + * + * @return tag type, tag name id, data source id (or null if none determined + * from event). 
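+ * <p> + * For example (illustrative), a {@code ContentTagAddedEvent} for a tag on an {@code AbstractFile} yields {@code Triple.of(TagType.FILE, tagNameId, dataSourceObjectId)}, while the corresponding deleted event yields a null data source id.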
+ */ + private Triple getTagData(PropertyChangeEvent evt) { + if (evt instanceof BlackBoardArtifactTagAddedEvent) { + BlackBoardArtifactTagAddedEvent event = (BlackBoardArtifactTagAddedEvent) evt; + // ensure tag added event has a valid content id + if (event.getAddedTag() != null + && event.getAddedTag().getContent() != null + && event.getAddedTag().getArtifact() != null) { + return Triple.of(TagType.RESULT, event.getAddedTag().getName().getId(), event.getAddedTag().getArtifact().getDataSourceObjectID()); + } + + } else if (evt instanceof BlackBoardArtifactTagDeletedEvent) { + BlackBoardArtifactTagDeletedEvent event = (BlackBoardArtifactTagDeletedEvent) evt; + BlackBoardArtifactTagDeletedEvent.DeletedBlackboardArtifactTagInfo deletedTagInfo = event.getDeletedTagInfo(); + if (deletedTagInfo != null) { + return Triple.of(TagType.RESULT, deletedTagInfo.getName().getId(), null); + } + } else if (evt instanceof ContentTagAddedEvent) { + ContentTagAddedEvent event = (ContentTagAddedEvent) evt; + // ensure tag added event has a valid content id + if (event.getAddedTag() != null && event.getAddedTag().getContent() != null) { + Content content = event.getAddedTag().getContent(); + Long dsId = content instanceof AbstractFile ? ((AbstractFile) content).getDataSourceObjectId() : null; + return Triple.of(TagType.FILE, event.getAddedTag().getName().getId(), dsId); + } + } else if (evt instanceof ContentTagDeletedEvent) { + ContentTagDeletedEvent event = (ContentTagDeletedEvent) evt; + // ensure tag deleted event has a valid content id + ContentTagDeletedEvent.DeletedContentTagInfo deletedTagInfo = event.getDeletedTagInfo(); + if (deletedTagInfo != null) { + return Triple.of(TagType.FILE, deletedTagInfo.getName().getId(), null); + } + } + return null; + } + /** * Handles fetching and paging of data for allTags. */ @@ -291,78 +427,18 @@ public class TagsDAO { super(params); } - @Override - public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return MainDAO.getInstance().getTagsDAO().getTags(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + protected TagsDAO getDAO() { + return MainDAO.getInstance().getTagsDAO(); } @Override - public boolean isRefreshRequired(PropertyChangeEvent evt) { - TagsSearchParams params = this.getParameters(); - String eventType = evt.getPropertyName(); - - // handle artifact/result tag changes - if (eventType.equals(Case.Events.BLACKBOARD_ARTIFACT_TAG_ADDED.toString()) - || eventType.equals(Case.Events.BLACKBOARD_ARTIFACT_TAG_DELETED.toString())) { - - // ignore non-artifact/result tag changes - if (params.getTagType() != TagsSearchParams.TagType.RESULT) { - return false; - } - - if (evt instanceof AutopsyEvent) { - if (evt instanceof BlackBoardArtifactTagAddedEvent) { - // An artifact associated with the current case has been tagged. - BlackBoardArtifactTagAddedEvent event = (BlackBoardArtifactTagAddedEvent) evt; - // ensure tag added event has a valid content id - if (event.getAddedTag() == null || event.getAddedTag().getContent() == null || event.getAddedTag().getArtifact() == null) { - return false; - } - return params.getTagName().getId() == event.getAddedTag().getId(); - } else if (evt instanceof BlackBoardArtifactTagDeletedEvent) { - // A tag has been removed from an artifact associated with the current case. 
- BlackBoardArtifactTagDeletedEvent event = (BlackBoardArtifactTagDeletedEvent) evt; - // ensure tag deleted event has a valid content id - BlackBoardArtifactTagDeletedEvent.DeletedBlackboardArtifactTagInfo deletedTagInfo = event.getDeletedTagInfo(); - if (deletedTagInfo == null) { - return false; - } - return params.getTagName().getId() == deletedTagInfo.getTagID(); - } - } - } - - // handle file/content tag changes - if (eventType.equals(Case.Events.CONTENT_TAG_ADDED.toString()) - || eventType.equals(Case.Events.CONTENT_TAG_DELETED.toString())) { - - // ignore non-file/content tag changes - if (params.getTagType() != TagsSearchParams.TagType.FILE) { - return false; - } + public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { + return getDAO().getTags(this.getParameters(), pageIdx * pageSize, (long) pageSize); + } - if (evt instanceof AutopsyEvent) { - if (evt instanceof ContentTagAddedEvent) { - // Content associated with the current case has been tagged. - ContentTagAddedEvent event = (ContentTagAddedEvent) evt; - // ensure tag added event has a valid content id - if (event.getAddedTag() == null || event.getAddedTag().getContent() == null) { - return false; - } - return params.getTagName().getId() == event.getAddedTag().getId(); - } else if (evt instanceof ContentTagDeletedEvent) { - // A tag has been removed from content associated with the current case. - ContentTagDeletedEvent event = (ContentTagDeletedEvent) evt; - // ensure tag deleted event has a valid content id - ContentTagDeletedEvent.DeletedContentTagInfo deletedTagInfo = event.getDeletedTagInfo(); - if (deletedTagInfo == null) { - return false; - } - return params.getTagName().getId() == deletedTagInfo.getTagID(); - } - } - } - return false; + @Override + public boolean isRefreshRequired(DAOEvent evt) { + return getDAO().isTagsInvalidatingEvent(this.getParameters(), evt); } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java index 2d42b4464e..81f176190d 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java @@ -19,6 +19,7 @@ package org.sleuthkit.autopsy.mainui.datamodel; import java.util.List; +import java.util.Objects; /** * A list of items to display in the tree. @@ -43,6 +44,85 @@ public class TreeResultsDTO { return items; } + /** + * Captures the count to be displayed in the UI. 
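+ * <p> + * For example, {@code TreeDisplayCount.getDeterminate(5)} renders the suffix " (5)", {@code INDETERMINATE} renders "...", and {@code NOT_SHOWN} renders no suffix (see {@code getDisplaySuffix} below).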
+ */ + public static class TreeDisplayCount { + + public enum Type { + DETERMINATE, + INDETERMINATE, + NOT_SHOWN, + UNSPECIFIED + } + + private final Type type; + private final long count; + + public static final TreeDisplayCount INDETERMINATE = new TreeDisplayCount(Type.INDETERMINATE, -1); + public static final TreeDisplayCount NOT_SHOWN = new TreeDisplayCount(Type.NOT_SHOWN, -1); + public static final TreeDisplayCount UNSPECIFIED = new TreeDisplayCount(Type.UNSPECIFIED, -1); + + public static TreeDisplayCount getDeterminate(long count) { + return new TreeDisplayCount(Type.DETERMINATE, count); + } + + private TreeDisplayCount(Type type, long count) { + this.type = type; + this.count = count; + } + + public Type getType() { + return type; + } + + public long getCount() { + return count; + } + + public String getDisplaySuffix() { + switch (this.type) { + case DETERMINATE: + return " (" + count + ")"; + case INDETERMINATE: + return "..."; + case NOT_SHOWN: + default: + return ""; + } + } + + @Override + public int hashCode() { + int hash = 5; + hash = 97 * hash + Objects.hashCode(this.type); + hash = 97 * hash + (int) (this.count ^ (this.count >>> 32)); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final TreeDisplayCount other = (TreeDisplayCount) obj; + if (this.count != other.count) { + return false; + } + if (this.type != other.type) { + return false; + } + return true; + } + + } + /** * A result providing a category and a count for that category. Equals and * hashCode are based on id, type id, and type data. @@ -51,29 +131,28 @@ public class TreeResultsDTO { private final String displayName; private final String typeId; - private final Long count; - private final T typeData; + private final TreeDisplayCount count; + private final T searchParams; private final Object id; /** * Main constructor. * - * @param typeId The id of this item type. - * @param typeData Data for this particular row's type (i.e. - * BlackboardArtifact.Type for counts of a particular - * artifact type). - * @param id The id of this row. Can be any object that - * implements equals and hashCode. - * @param displayName The display name of this row. - * @param count The count of results for this row or null if not - * applicable. + * @param typeId The id of this item type. + * @param searchParams Search params for this tree item that can be used + * to display results. + * @param id The id of this row. Can be any object that + * implements equals and hashCode. + * @param displayName The display name of this row. + * @param count The count of results for this row or null if not + * applicable. */ - public TreeItemDTO(String typeId, T typeData, Object id, String displayName, Long count) { + public TreeItemDTO(String typeId, T searchParams, Object id, String displayName, TreeDisplayCount count) { this.typeId = typeId; this.id = id; this.displayName = displayName; this.count = count; - this.typeData = typeData; + this.searchParams = searchParams; } /** @@ -86,18 +165,17 @@ public class TreeResultsDTO { /** * @return The count of results for this row or null if not applicable. */ - public Long getCount() { + public TreeDisplayCount getDisplayCount() { return count; } /** * - * @return Data for this particular row's type (i.e. - * BlackboardArtifact.Type for counts of a particular artifact - * type). 
+ * @return Search params for this tree item that can be used to display + * results. */ - public T getTypeData() { - return typeData; + public T getSearchParams() { + return searchParams; } /** @@ -114,7 +192,5 @@ public class TreeResultsDTO { public String getTypeId() { return typeId; } - - } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java index 6f88d47289..1070fd9907 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java @@ -18,16 +18,22 @@ */ package org.sleuthkit.autopsy.mainui.datamodel; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import java.beans.PropertyChangeEvent; import java.sql.SQLException; import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.logging.Level; @@ -35,18 +41,22 @@ import java.util.logging.Logger; import java.util.stream.Collectors; import java.util.stream.Stream; import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.tuple.Pair; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import static org.sleuthkit.autopsy.core.UserPreferences.hideKnownFilesInViewsTree; import static org.sleuthkit.autopsy.core.UserPreferences.hideSlackFilesInViewsTree; -import org.sleuthkit.autopsy.datamodel.FileTypeExtensions; -import org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO.ExtensionMediaType; +import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeDisplayCount; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventUtils; +import org.sleuthkit.autopsy.mainui.datamodel.events.FileTypeExtensionsEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.FileTypeMimeEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.FileTypeSizeEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.CaseDbAccessManager.CaseDbPreparedStatement; -import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; @@ -55,7 +65,7 @@ import org.sleuthkit.datamodel.TskData; * Provides information to populate the results viewer for data in the views * section. */ -public class ViewsDAO { +public class ViewsDAO extends AbstractDAO { private static final Logger logger = Logger.getLogger(ViewsDAO.class.getName()); @@ -76,40 +86,11 @@ public class ViewsDAO { return instance; } - static ExtensionMediaType getExtensionMediaType(String ext) { - if (StringUtils.isBlank(ext)) { - return ExtensionMediaType.UNCATEGORIZED; - } else { - ext = "." 
+ ext; - } - if (FileTypeExtensions.getImageExtensions().contains(ext)) { - return ExtensionMediaType.IMAGE; - } else if (FileTypeExtensions.getVideoExtensions().contains(ext)) { - return ExtensionMediaType.VIDEO; - } else if (FileTypeExtensions.getAudioExtensions().contains(ext)) { - return ExtensionMediaType.AUDIO; - } else if (FileTypeExtensions.getDocumentExtensions().contains(ext)) { - return ExtensionMediaType.DOC; - } else if (FileTypeExtensions.getExecutableExtensions().contains(ext)) { - return ExtensionMediaType.EXECUTABLE; - } else if (FileTypeExtensions.getTextExtensions().contains(ext)) { - return ExtensionMediaType.TEXT; - } else if (FileTypeExtensions.getWebExtensions().contains(ext)) { - return ExtensionMediaType.WEB; - } else if (FileTypeExtensions.getPDFExtensions().contains(ext)) { - return ExtensionMediaType.PDF; - } else if (FileTypeExtensions.getArchiveExtensions().contains(ext)) { - return ExtensionMediaType.ARCHIVE; - } else { - return ExtensionMediaType.UNCATEGORIZED; - } - } - private SleuthkitCase getCase() throws NoCurrentCaseException { return Case.getCurrentCaseThrows().getSleuthkitCase(); } - public SearchResultsDTO getFilesByExtension(FileTypeExtensionsSearchParams key, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { + public SearchResultsDTO getFilesByExtension(FileTypeExtensionsSearchParams key, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException { if (key.getFilter() == null) { throw new IllegalArgumentException("Must have non-null filter"); } else if (key.getDataSourceId() != null && key.getDataSourceId() <= 0) { @@ -117,14 +98,10 @@ public class ViewsDAO { } SearchParams searchParams = new SearchParams<>(key, startItem, maxCount); - if (hardRefresh) { - this.searchParamsCache.invalidate(searchParams); - } - return searchParamsCache.get(searchParams, () -> fetchExtensionSearchResultsDTOs(key.getFilter(), key.getDataSourceId(), startItem, maxCount)); } - public SearchResultsDTO getFilesByMime(FileTypeMimeSearchParams key, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { + public SearchResultsDTO getFilesByMime(FileTypeMimeSearchParams key, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException { if (key.getMimeType() == null) { throw new IllegalArgumentException("Must have non-null filter"); } else if (key.getDataSourceId() != null && key.getDataSourceId() <= 0) { @@ -132,14 +109,10 @@ public class ViewsDAO { } SearchParams searchParams = new SearchParams<>(key, startItem, maxCount); - if (hardRefresh) { - this.searchParamsCache.invalidate(searchParams); - } - return searchParamsCache.get(searchParams, () -> fetchMimeSearchResultsDTOs(key.getMimeType(), key.getDataSourceId(), startItem, maxCount)); } - public SearchResultsDTO getFilesBySize(FileTypeSizeSearchParams key, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { + public SearchResultsDTO getFilesBySize(FileTypeSizeSearchParams key, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException { if (key.getSizeFilter() == null) { throw new IllegalArgumentException("Must have non-null filter"); } else if (key.getDataSourceId() != null && key.getDataSourceId() <= 0) { @@ -147,41 +120,38 @@ public class ViewsDAO { } SearchParams searchParams = new SearchParams<>(key, startItem, maxCount); - if (hardRefresh) { - 
this.searchParamsCache.invalidate(searchParams); - } - return searchParamsCache.get(searchParams, () -> fetchSizeSearchResultsDTOs(key.getSizeFilter(), key.getDataSourceId(), startItem, maxCount)); } - public boolean isFilesByExtInvalidating(FileTypeExtensionsSearchParams key, Content eventData) { - if (!(eventData instanceof AbstractFile)) { + private boolean isFilesByExtInvalidating(FileTypeExtensionsSearchParams key, DAOEvent eventData) { + if (!(eventData instanceof FileTypeExtensionsEvent)) { return false; } - AbstractFile file = (AbstractFile) eventData; - String extension = "." + file.getNameExtension().toLowerCase(); - return key.getFilter().getFilter().contains(extension); + FileTypeExtensionsEvent extEvt = (FileTypeExtensionsEvent) eventData; + String extension = extEvt.getExtension().toLowerCase(); + return key.getFilter().getFilter().contains(extension) + && (key.getDataSourceId() == null || key.getDataSourceId().equals(extEvt.getDataSourceId())); } - public boolean isFilesByMimeInvalidating(FileTypeMimeSearchParams key, Content eventData) { - if (!(eventData instanceof AbstractFile)) { + private boolean isFilesByMimeInvalidating(FileTypeMimeSearchParams key, DAOEvent eventData) { + if (!(eventData instanceof FileTypeMimeEvent)) { return false; } - AbstractFile file = (AbstractFile) eventData; - String mimeType = file.getMIMEType(); - return key.getMimeType().equalsIgnoreCase(mimeType); + FileTypeMimeEvent mimeEvt = (FileTypeMimeEvent) eventData; + return mimeEvt.getMimeType().startsWith(key.getMimeType()) + && (key.getDataSourceId() == null || Objects.equals(key.getDataSourceId(), mimeEvt.getDataSourceId())); } - public boolean isFilesBySizeInvalidating(FileTypeSizeSearchParams key, Content eventData) { - if (!(eventData instanceof AbstractFile)) { + private boolean isFilesBySizeInvalidating(FileTypeSizeSearchParams key, DAOEvent eventData) { + if (!(eventData instanceof FileTypeSizeEvent)) { return false; } - long size = eventData.getSize(); - - return size >= key.getSizeFilter().getMinBound() && (key.getSizeFilter().getMaxBound() == null || size < key.getSizeFilter().getMaxBound()); + FileTypeSizeEvent sizeEvt = (FileTypeSizeEvent) eventData; + return sizeEvt.getSizeFilter().equals(key.getSizeFilter()) + && (key.getDataSourceId() == null || Objects.equals(key.getDataSourceId(), sizeEvt.getDataSourceId())); } /** @@ -286,7 +256,7 @@ public class ViewsDAO { * * @return The clause to be proceeded with 'where' or 'and'. */ - private static String getFileSizeClause(FileTypeSizeSearchParams.FileSizeFilter filter) { + private static String getFileSizeClause(FileSizeFilter filter) { return filter.getMaxBound() == null ? "(size >= " + filter.getMinBound() + ")" : "(size >= " + filter.getMinBound() + " AND size < " + filter.getMaxBound() + ")"; @@ -314,7 +284,7 @@ public class ViewsDAO { * * @return The clause to be proceeded with 'where' or 'and'. 
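* <p> * For example (illustrative bounds), a filter with a min bound of 1000 and a max bound of 50000 contributes the size clause {@code (size >= 1000 AND size < 50000)} (see {@code getFileSizeClause} above).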
*/ - private String getFileSizesWhereStatement(FileTypeSizeSearchParams.FileSizeFilter filter, Long dataSourceId) { + private String getFileSizesWhereStatement(FileSizeFilter filter, Long dataSourceId) { String query = getBaseFileSizeFilter() + " AND " + getFileSizeClause(filter) + getDataSourceAndClause(dataSourceId); @@ -350,7 +320,7 @@ public class ViewsDAO { new FileTypeExtensionsSearchParams(entry.getKey(), dataSourceId), entry.getKey(), entry.getKey().getDisplayName(), - entry.getValue()); + TreeDisplayCount.getDeterminate(entry.getValue())); }) .sorted((a, b) -> a.getDisplayName().compareToIgnoreCase(b.getDisplayName())) .collect(Collectors.toList()); @@ -370,12 +340,12 @@ public class ViewsDAO { * @throws ExecutionException */ public TreeResultsDTO getFileSizeCounts(Long dataSourceId) throws IllegalArgumentException, ExecutionException { - Map whereClauses = Stream.of(FileTypeSizeSearchParams.FileSizeFilter.values()) + Map whereClauses = Stream.of(FileSizeFilter.values()) .collect(Collectors.toMap( filter -> filter, filter -> getFileSizeClause(filter))); - Map countsByFilter = getFilesCounts(whereClauses, getBaseFileSizeFilter(), dataSourceId, true); + Map countsByFilter = getFilesCounts(whereClauses, getBaseFileSizeFilter(), dataSourceId, true); List> treeList = countsByFilter.entrySet().stream() .map(entry -> { @@ -384,7 +354,7 @@ public class ViewsDAO { new FileTypeSizeSearchParams(entry.getKey(), dataSourceId), entry.getKey(), entry.getKey().getDisplayName(), - entry.getValue()); + TreeDisplayCount.getDeterminate(entry.getValue())); }) .sorted((a, b) -> a.getDisplayName().compareToIgnoreCase(b.getDisplayName())) .collect(Collectors.toList()); @@ -469,9 +439,9 @@ public class ViewsDAO { new FileTypeMimeSearchParams(entry.getKey(), dataSourceId), name, name, - entry.getValue()); + TreeDisplayCount.getDeterminate(entry.getValue())); }) - .sorted((a, b) -> stringCompare(a.getTypeData().getMimeType(), b.getTypeData().getMimeType())) + .sorted((a, b) -> stringCompare(a.getSearchParams().getMimeType(), b.getSearchParams().getMimeType())) .collect(Collectors.toList()); return new TreeResultsDTO<>(treeList); @@ -597,7 +567,7 @@ public class ViewsDAO { return fetchFileViewFiles(whereStatement, MIME_TYPE_DISPLAY_NAME, startItem, maxResultCount); } - private SearchResultsDTO fetchSizeSearchResultsDTOs(FileTypeSizeSearchParams.FileSizeFilter filter, Long dataSourceId, long startItem, Long maxResultCount) throws NoCurrentCaseException, TskCoreException { + private SearchResultsDTO fetchSizeSearchResultsDTOs(FileSizeFilter filter, Long dataSourceId, long startItem, Long maxResultCount) throws NoCurrentCaseException, TskCoreException { String whereStatement = getFileSizesWhereStatement(filter, dataSourceId); return fetchFileViewFiles(whereStatement, filter.getDisplayName(), startItem, maxResultCount); } @@ -633,13 +603,188 @@ public class ViewsDAO { file.getId(), file.getName(), file.getNameExtension(), - getExtensionMediaType(file.getNameExtension()), + MediaTypeUtils.getExtensionMediaType(file.getNameExtension()), file.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.ALLOC), file.getType(), cellValues)); } - return new BaseSearchResultsDTO(FILE_VIEW_EXT_TYPE_ID, displayName, FileSystemColumnUtils.getColumnKeysForAbstractfile(), fileRows, startItem, totalResultsCount); + return new BaseSearchResultsDTO(FILE_VIEW_EXT_TYPE_ID, displayName, FileSystemColumnUtils.getColumnKeysForAbstractfile(), fileRows, AbstractFile.class.getName(), startItem, totalResultsCount); + } + + @Override + void 
clearCaches() { + this.searchParamsCache.invalidateAll(); + } + + private Pair getMimePieces(String mimeType) { + int idx = mimeType.indexOf("/"); + String mimePrefix = idx > 0 ? mimeType.substring(0, idx) : mimeType; + String mimeSuffix = idx > 0 ? mimeType.substring(idx + 1) : null; + return Pair.of(mimePrefix, mimeSuffix); + } + + @Override + Set handleIngestComplete() { + // GVDTODO + return Collections.emptySet(); + } + + @Override + Set shouldRefreshTree() { + // GVDTODO + return Collections.emptySet(); + } + + @Override + Set processEvent(PropertyChangeEvent evt) { + // GVDTODO maps may not be necessary now that this no longer processes a list of events. + Map> fileExtensionDsMap = new HashMap<>(); + Map>> mimeTypeDsMap = new HashMap<>(); + Map> fileSizeDsMap = new HashMap<>(); + + AbstractFile af = DAOEventUtils.getFileFromFileEvent(evt); + if (af == null) { + return Collections.emptySet(); + } + + // create an extension mapping if an extension is present + if (!StringUtils.isBlank(af.getNameExtension())) { + fileExtensionDsMap + .computeIfAbsent("." + af.getNameExtension(), (k) -> new HashSet<>()) + .add(af.getDataSourceObjectId()); + } + + // create a mime type mapping if a mime type is present + if (!StringUtils.isBlank(af.getMIMEType())) { + Pair mimePieces = getMimePieces(af.getMIMEType()); + mimeTypeDsMap + .computeIfAbsent(mimePieces.getKey(), (k) -> new HashMap<>()) + .computeIfAbsent(mimePieces.getValue(), (k) -> new HashSet<>()) + .add(af.getDataSourceObjectId()); + } + + // create a size mapping if a matching size filter is present + FileSizeFilter sizeFilter = Stream.of(FileSizeFilter.values()) + .filter(filter -> af.getSize() >= filter.getMinBound() && (filter.getMaxBound() == null || af.getSize() < filter.getMaxBound())) + .findFirst() + .orElse(null); + + if (sizeFilter != null) { + fileSizeDsMap + .computeIfAbsent(sizeFilter, (k) -> new HashSet<>()) + .add(af.getDataSourceObjectId()); + } + + if (fileExtensionDsMap.isEmpty() && mimeTypeDsMap.isEmpty() && fileSizeDsMap.isEmpty()) { + return Collections.emptySet(); + } + + clearRelevantCacheEntries(fileExtensionDsMap, mimeTypeDsMap, fileSizeDsMap); + + return getDAOEvents(fileExtensionDsMap, mimeTypeDsMap, fileSizeDsMap); + } + + /** + * Creates the DAO events that correspond to the digest of autopsy events. + * + * @param fileExtensionDsMap Maps the file extension to the data sources + * where files were found with that extension. + * @param mimeTypeDsMap Maps the mime type to the data sources where + * files were found with that mime type. + * @param fileSizeDsMap Maps the size filter to the data sources where + * files were found within that size filter. + * + * @return The set of affected DAO events. + */ + private Set getDAOEvents(Map> fileExtensionDsMap, + Map>> mimeTypeDsMap, + Map> fileSizeDsMap) { + + Stream fileExtStream = fileExtensionDsMap.entrySet().stream() + .flatMap(entry -> entry.getValue().stream().map(dsId -> new FileTypeExtensionsEvent(entry.getKey(), dsId))); + + Set fileMimeList = new HashSet<>(); + for (Entry>> prefixEntry : mimeTypeDsMap.entrySet()) { + String mimePrefix = prefixEntry.getKey(); + for (Entry> suffixEntry : prefixEntry.getValue().entrySet()) { + String mimeSuffix = suffixEntry.getKey(); + for (long dsId : suffixEntry.getValue()) { + String mimeType = mimePrefix + (mimeSuffix == null ? 
"" : ("/" + mimeSuffix)); + fileMimeList.add(new FileTypeMimeEvent(mimeType, dsId)); + } + } + } + + Stream fileSizeStream = fileSizeDsMap.entrySet().stream() + .flatMap(entry -> entry.getValue().stream().map(dsId -> new FileTypeSizeEvent(entry.getKey(), dsId))); + + return Stream.of(fileExtStream, fileMimeList.stream(), fileSizeStream) + .flatMap(stream -> stream) + .collect(Collectors.toSet()); + } + + /** + * Clears relevant cache entries from cache based on digest of autopsy + * events. + * + * @param fileExtensionDsMap Maps the file extension to the data sources + * where files were found with that extension. + * @param mimeTypeDsMap Maps the mime type to the data sources where + * files were found with that mime type. + * @param fileSizeDsMap Maps the size to the data sources where files + * were found within that size filter. + */ + private void clearRelevantCacheEntries(Map> fileExtensionDsMap, + Map>> mimeTypeDsMap, + Map> fileSizeDsMap) { + + // invalidate cache entries that are affected by events + ConcurrentMap, SearchResultsDTO> concurrentMap = this.searchParamsCache.asMap(); + concurrentMap.forEach((k, v) -> { + Object baseParams = k.getParamData(); + if (baseParams instanceof FileTypeExtensionsSearchParams) { + FileTypeExtensionsSearchParams extParams = (FileTypeExtensionsSearchParams) baseParams; + // if search params have a filter where extension is present and the data source id is null or == + boolean isMatch = extParams.getFilter().getFilter().stream().anyMatch((ext) -> { + Set dsIds = fileExtensionDsMap.get(ext); + return (dsIds != null && (extParams.getDataSourceId() == null || dsIds.contains(extParams.getDataSourceId()))); + }); + + if (isMatch) { + concurrentMap.remove(k); + } + } else if (baseParams instanceof FileTypeMimeSearchParams) { + FileTypeMimeSearchParams mimeParams = (FileTypeMimeSearchParams) baseParams; + Pair mimePieces = getMimePieces(mimeParams.getMimeType()); + Map> suffixes = mimeTypeDsMap.get(mimePieces.getKey()); + if (suffixes == null) { + return; + } + + // if search params is top level mime prefix (without suffix) and data source is null or ==. 
+ if (mimePieces.getValue() == null + && (mimeParams.getDataSourceId() == null + || suffixes.values().stream().flatMap(set -> set.stream()).anyMatch(ds -> Objects.equals(mimeParams.getDataSourceId(), ds)))) { + + concurrentMap.remove(k); + // otherwise, see if suffix is present + } else { + Set dataSources = suffixes.get(mimePieces.getValue()); + if (dataSources != null && (mimeParams.getDataSourceId() == null || dataSources.contains(mimeParams.getDataSourceId()))) { + concurrentMap.remove(k); + } + } + + } else if (baseParams instanceof FileTypeSizeSearchParams) { + FileTypeSizeSearchParams sizeParams = (FileTypeSizeSearchParams) baseParams; + Set dataSources = fileSizeDsMap.get(sizeParams.getSizeFilter()); + if (dataSources != null && (sizeParams.getDataSourceId() == null || dataSources.contains(sizeParams.getDataSourceId()))) { + concurrentMap.remove(k); + } + } + }); } /** @@ -656,19 +801,18 @@ public class ViewsDAO { super(params); } - @Override - public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return MainDAO.getInstance().getViewsDAO().getFilesByExtension(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + protected ViewsDAO getDAO() { + return MainDAO.getInstance().getViewsDAO(); } @Override - public boolean isRefreshRequired(PropertyChangeEvent evt) { - Content content = this.getContentFromEvt(evt); - if (content == null) { - return false; - } + public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { + return getDAO().getFilesByExtension(this.getParameters(), pageIdx * pageSize, (long) pageSize); + } - return MainDAO.getInstance().getViewsDAO().isFilesByExtInvalidating(this.getParameters(), content); + @Override + public boolean isRefreshRequired(DAOEvent evt) { + return getDAO().isFilesByExtInvalidating(this.getParameters(), evt); } } @@ -686,26 +830,25 @@ public class ViewsDAO { super(params); } - @Override - public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return MainDAO.getInstance().getViewsDAO().getFilesByMime(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + protected ViewsDAO getDAO() { + return MainDAO.getInstance().getViewsDAO(); } @Override - public boolean isRefreshRequired(PropertyChangeEvent evt) { - Content content = this.getContentFromEvt(evt); - if (content == null) { - return false; - } + public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { + return getDAO().getFilesByMime(this.getParameters(), pageIdx * pageSize, (long) pageSize); + } - return MainDAO.getInstance().getViewsDAO().isFilesByMimeInvalidating(this.getParameters(), content); + @Override + public boolean isRefreshRequired(DAOEvent evt) { + return getDAO().isFilesByMimeInvalidating(this.getParameters(), evt); } } /** * Handles fetching and paging of data for file types by size. */ - public static class FileTypeSizeFetcher extends DAOFetcher { + public class FileTypeSizeFetcher extends DAOFetcher { /** * Main constructor. 
@@ -716,19 +859,18 @@ public class ViewsDAO { super(params); } - @Override - public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return MainDAO.getInstance().getViewsDAO().getFilesBySize(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + protected ViewsDAO getDAO() { + return MainDAO.getInstance().getViewsDAO(); } @Override - public boolean isRefreshRequired(PropertyChangeEvent evt) { - Content content = this.getContentFromEvt(evt); - if (content == null) { - return false; - } + public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { + return getDAO().getFilesBySize(this.getParameters(), pageIdx * pageSize, (long) pageSize); + } - return MainDAO.getInstance().getViewsDAO().isFilesBySizeInvalidating(this.getParameters(), content); + @Override + public boolean isRefreshRequired(DAOEvent evt) { + return getDAO().isFilesBySizeInvalidating(this.getParameters(), evt); } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AnalysisResultEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AnalysisResultEvent.java new file mode 100644 index 0000000000..f8555c402c --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AnalysisResultEvent.java @@ -0,0 +1,33 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +import org.sleuthkit.datamodel.BlackboardArtifact; + +/** + * An event signaling that an analysis result of a particular type has been + * added or changed, possibly for a particular data source. + */ +public class AnalysisResultEvent extends BlackboardArtifactEvent { + + public AnalysisResultEvent(BlackboardArtifact.Type artifactType, long dataSourceId) { + super(artifactType, dataSourceId); + } + +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AnalysisResultSetEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AnalysisResultSetEvent.java new file mode 100644 index 0000000000..c0bdf9e90b --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AnalysisResultSetEvent.java @@ -0,0 +1,38 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +import org.sleuthkit.datamodel.BlackboardArtifact; + +/** + * An event for an Analysis Result that is organized by Set names to + * signal that one has been added or removed on a given data source. + */ +public class AnalysisResultSetEvent extends AnalysisResultEvent { + private final String setName; + + public AnalysisResultSetEvent(String setName, BlackboardArtifact.Type artifactType, long dataSourceId) { + super(artifactType, dataSourceId); + this.setName = setName; + } + + public String getSetName() { + return setName; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/BlackboardArtifactEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/BlackboardArtifactEvent.java new file mode 100644 index 0000000000..a2ecc73bf4 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/BlackboardArtifactEvent.java @@ -0,0 +1,79 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +import java.util.Objects; +import org.sleuthkit.datamodel.BlackboardArtifact; + +/** + * A base class for DataArtifact and AnalysisResult events to signal that one + * has been added or removed. + */ +public abstract class BlackboardArtifactEvent implements DAOEvent { + private final BlackboardArtifact.Type artifactType; + private final long dataSourceId; + + BlackboardArtifactEvent(BlackboardArtifact.Type artifactType, long dataSourceId) { + this.artifactType = artifactType; + this.dataSourceId = dataSourceId; + } + + public BlackboardArtifact.Type getArtifactType() { + return artifactType; + } + + public long getDataSourceId() { + return dataSourceId; + } + + @Override + public int hashCode() { + int hash = 5; + hash = 17 * hash + Objects.hashCode(this.artifactType); + hash = 17 * hash + (int) (this.dataSourceId ^ (this.dataSourceId >>> 32)); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final BlackboardArtifactEvent other = (BlackboardArtifactEvent) obj; + if (this.dataSourceId != other.dataSourceId) { + return false; + } + if (!Objects.equals(this.artifactType, other.artifactType)) { + return false; + } + return true; + } + + + @Override + public Type getType() { + return Type.RESULT; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/CommAccountsEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/CommAccountsEvent.java new file mode 100755 index 0000000000..1da76f7f34 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/CommAccountsEvent.java @@ -0,0 +1,76 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. 
+ * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +import java.util.Objects; +import org.sleuthkit.datamodel.Account; +import org.sleuthkit.datamodel.BlackboardArtifact; + +/** + * An event signaling that communication account artifacts of a given account + * type have been added or changed, possibly on a particular data source. + */ +public class CommAccountsEvent extends DataArtifactEvent { + + private final Account.Type accountType; + + /** + * Main constructor. + * + * @param accountType The account type identifier. + * @param dataSourceId The data source id to filter on or null. + */ + public CommAccountsEvent(Account.Type accountType, Long dataSourceId) { + super(BlackboardArtifact.Type.TSK_ACCOUNT, dataSourceId); + this.accountType = accountType; + } + + /** + * @return The account type identifier. + */ + public Account.Type getAccountType() { + return accountType; + } + + @Override + public int hashCode() { + int hash = 7; + hash = 29 * hash + Objects.hashCode(this.accountType); + hash = 29 * hash + super.hashCode(); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final CommAccountsEvent other = (CommAccountsEvent) obj; + if (!Objects.equals(this.accountType, other.accountType)) { + return false; + } + return super.equals(obj); + } + +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOAggregateEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOAggregateEvent.java new file mode 100644 index 0000000000..d8930e37ca --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOAggregateEvent.java @@ -0,0 +1,46 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +import java.util.Collections; +import java.util.Set; + +/** + * A single event containing an aggregate of all affected data. + */ +public class DAOAggregateEvent { + + private final Set<DAOEvent> objects; + + /** + * Main constructor. + * + * @param objects The set of events in this aggregate event. + */ + public DAOAggregateEvent(Set<DAOEvent> objects) { + this.objects = Collections.unmodifiableSet(objects); + } + + /** + * @return The events in this aggregate event.
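+ *         The returned set is unmodifiable. A consumer sketch (the
+ *         {@code aggregateEvent} variable is hypothetical, for
+ *         illustration only):
+ *         <pre>{@code
+ * for (DAOEvent evt : aggregateEvent.getEvents()) {
+ *     if (evt.getType() == DAOEvent.Type.TREE) {
+ *         // route to tree-count handling
+ *     }
+ * }
+ * }</pre>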
+ */ + public Set<DAOEvent> getEvents() { + return objects; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEvent.java new file mode 100644 index 0000000000..c4a1c3a3ca --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEvent.java @@ -0,0 +1,28 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +/** + * An event emitted by the DAO. + */ +public interface DAOEvent { + public enum Type { TREE, RESULT } + + DAOEvent.Type getType(); +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventBatcher.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventBatcher.java new file mode 100644 index 0000000000..33789cb007 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventBatcher.java @@ -0,0 +1,123 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +import com.google.common.util.concurrent.ThreadFactoryBuilder; +import java.util.Collection; +import java.util.HashSet; +import java.util.Set; +import java.util.concurrent.ScheduledThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import org.apache.commons.collections4.CollectionUtils; + +/** + * Batches DAO events that arrive within a time window so that refreshes + * triggered by bursts of incoming events are throttled. + */ +public class DAOEventBatcher<T> { + + /** + * The BatchedEventsHandler interface is implemented by consumers that wish + * to receive the accumulated events once the batch window elapses. + */ + public interface BatchedEventsHandler<T> { + + /** + * Handles a set of aggregated events. + * + * @param events The events to handle.
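+ *
+ * <p>A minimal wiring sketch for the enclosing batcher (the 500 ms
+ * window and the printing handler are assumptions for illustration
+ * only):</p>
+ * <pre>{@code
+ * DAOEventBatcher<DAOEvent> batcher = new DAOEventBatcher<>(
+ *         batch -> System.out.println("Flushing " + batch.size() + " events"),
+ *         500);
+ * batcher.queueEvent(new OsAccountEvent());
+ * // further events queued within the window are de-duplicated into one batch
+ * }</pre>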
+ */ + void handle(Set<T> events); + } + + private final ScheduledThreadPoolExecutor refreshExecutor + = new ScheduledThreadPoolExecutor(1, + new ThreadFactoryBuilder().setNameFormat(DAOEventBatcher.class.getName()).build()); + + private Set<T> aggregateEvents = new HashSet<>(); + private final Object eventListLock = new Object(); + private boolean isRunning = false; + + private final BatchedEventsHandler<T> eventsHandler; + private final long batchMillis; + + public DAOEventBatcher(BatchedEventsHandler<T> eventsHandler, long batchMillis) { + this.eventsHandler = eventsHandler; + this.batchMillis = batchMillis; + } + + /** + * Queues an event to be fired as part of a time-windowed batch. + * + * @param event The event. + */ + public void queueEvent(T event) { + synchronized (this.eventListLock) { + this.aggregateEvents.add(event); + verifyRunning(); + } + } + + /** + * Starts up the throttled event runner if it is not currently running. + */ + private void verifyRunning() { + synchronized (this.eventListLock) { + if (!this.isRunning) { + refreshExecutor.schedule(() -> fireEvents(), this.batchMillis, TimeUnit.MILLISECONDS); + this.isRunning = true; + } + } + } + + /** + * Queues a collection of events to be fired as part of a time-windowed + * batch. + * + * @param events The events. + */ + public void enqueueAllEvents(Collection<T> events) { + if (CollectionUtils.isNotEmpty(events)) { + synchronized (this.eventListLock) { + this.aggregateEvents.addAll(events); + verifyRunning(); + } + } + } + + /** + * Flushes any currently batched events, emptying the queue of batched + * events. + * + * @return The flushed events. + */ + public Set<T> flushEvents() { + synchronized (this.eventListLock) { + Set<T> evtsToFire = this.aggregateEvents; + this.aggregateEvents = new HashSet<>(); + this.isRunning = false; + return evtsToFire; + } + } + + /** + * Fires all batched events and clears the batch. + */ + private void fireEvents() { + this.eventsHandler.handle(flushEvents()); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventUtils.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventUtils.java new file mode 100644 index 0000000000..d6789f7b36 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventUtils.java @@ -0,0 +1,108 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +import java.beans.PropertyChangeEvent; +import org.sleuthkit.autopsy.ingest.IngestManager; +import org.sleuthkit.autopsy.ingest.ModuleContentEvent; +import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.Content; + +/** + * Utilities for handling events in the DAOs. + */ +public class DAOEventUtils { + + /** + * Returns the file content from the event. If the event is not a file event + * or the event does not contain file content, null is returned.
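+ * <p>For example (a sketch; assumes {@code evt} is a FILE_DONE or
+ * CONTENT_CHANGED PropertyChangeEvent received from the IngestManager):</p>
+ * <pre>{@code
+ * Content content = DAOEventUtils.getContentFromFileEvent(evt);
+ * if (content != null) {
+ *     // invalidate caches or emit DAO events for this content
+ * }
+ * }</pre>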
+ * + * @param evt The event + * + * @return The inner content or null if no content. + */ + public static Content getContentFromFileEvent(PropertyChangeEvent evt) { + String eventName = evt.getPropertyName(); + Content derivedContent = getDerivedFileContentFromFileEvent(evt); + if (derivedContent != null) { + return derivedContent; + } else if (IngestManager.IngestModuleEvent.FILE_DONE.toString().equals(eventName) + && (evt.getNewValue() instanceof Content)) { + return (Content) evt.getNewValue(); + } else { + return null; + } + } + + /** + * Returns the content from the ModuleContentEvent. If the event does not + * contain a ModuleContentEvent or the event does not contain Content, null + * is returned. + * + * @param evt The event + * + * @return The inner content or null if no content. + */ + public static Content getDerivedFileContentFromFileEvent(PropertyChangeEvent evt) { + String eventName = evt.getPropertyName(); + if (IngestManager.IngestModuleEvent.CONTENT_CHANGED.toString().equals(eventName) + && (evt.getOldValue() instanceof ModuleContentEvent) + && ((ModuleContentEvent) evt.getOldValue()).getSource() instanceof Content) { + + return (Content) ((ModuleContentEvent) evt.getOldValue()).getSource(); + + } else { + return null; + } + } + + /** + * Returns the file in the event if one is found. + * + * @param evt The autopsy event. + * + * @return The inner file or null if no file found. + */ + public static AbstractFile getFileFromFileEvent(PropertyChangeEvent evt) { + Content content = getContentFromFileEvent(evt); + return (content instanceof AbstractFile) + ? ((AbstractFile) content) + : null; + } + + /** + * Returns the ModuleDataEvent in the event if there is a child + * ModuleDataEvent. If not, null is returned. + * + * @param evt The event. + * + * @return The inner ModuleDataEvent or null. + */ + public static ModuleDataEvent getModuelDataFromArtifactEvent(PropertyChangeEvent evt) { + String eventName = evt.getPropertyName(); + if (IngestManager.IngestModuleEvent.DATA_ADDED.toString().equals(eventName) + && (evt.getOldValue() instanceof ModuleDataEvent)) { + + return (ModuleDataEvent) evt.getOldValue(); + } else { + return null; + } + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DataArtifactEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DataArtifactEvent.java new file mode 100644 index 0000000000..3cbd809414 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DataArtifactEvent.java @@ -0,0 +1,32 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +import org.sleuthkit.datamodel.BlackboardArtifact; + +/** + * An event signaling that a data artifact of a particular type has been added + * or changed, possibly on a particular data source.
+ */ +public class DataArtifactEvent extends BlackboardArtifactEvent { + + public DataArtifactEvent(BlackboardArtifact.Type artifactType, long dataSourceId) { + super(artifactType, dataSourceId); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemContentEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemContentEvent.java new file mode 100644 index 0000000000..a72c93cea2 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemContentEvent.java @@ -0,0 +1,74 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +import java.util.Objects; + +/** + * An event signaling that children files were added or removed from the given + * parent ID. + */ +public class FileSystemContentEvent implements DAOEvent { + + private final Long contentObjectId; + + /** + * Main constructor. + * + * @param contentObjectId The parent content object id. If null, performs + * full refresh of file tree. + */ + public FileSystemContentEvent(Long contentObjectId) { + this.contentObjectId = contentObjectId; + } + + public Long getContentObjectId() { + return contentObjectId; + } + + @Override + public int hashCode() { + int hash = 7; + hash = 67 * hash + Objects.hashCode(this.contentObjectId); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final FileSystemContentEvent other = (FileSystemContentEvent) obj; + if (!Objects.equals(this.contentObjectId, other.contentObjectId)) { + return false; + } + return true; + } + + @Override + public Type getType() { + return Type.RESULT; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemHostEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemHostEvent.java new file mode 100644 index 0000000000..f777435474 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemHostEvent.java @@ -0,0 +1,68 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +import java.util.Objects; + +/** + * An event signaling that a data source has been added or removed from the + * given Host. + */ +public class FileSystemHostEvent implements DAOEvent { + + private final Long hostObjectId; + + public FileSystemHostEvent(Long hostObjectId) { + this.hostObjectId = hostObjectId; + } + + public Long getHostObjectId() { + return hostObjectId; + } + + @Override + public int hashCode() { + int hash = 7; + hash = 67 * hash + Objects.hashCode(this.hostObjectId); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final FileSystemHostEvent other = (FileSystemHostEvent) obj; + if (!Objects.equals(this.hostObjectId, other.hostObjectId)) { + return false; + } + return true; + } + + @Override + public DAOEvent.Type getType() { + return DAOEvent.Type.RESULT; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemPersonEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemPersonEvent.java new file mode 100644 index 0000000000..110429a6d9 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemPersonEvent.java @@ -0,0 +1,73 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +import java.util.Objects; + +/** + * An event signaling that a host has been added or removed from the given + * Person. + */ +public class FileSystemPersonEvent implements DAOEvent { + + private final Long personObjectId; + + /** + * Main constructor. + * + * @param personObjectId May be null for hosts with no associated Person. 
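+ *                       For example, a host with no person grouping
+ *                       could be signaled with
+ *                       {@code new FileSystemPersonEvent(null)} (sketch).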
+ */ + public FileSystemPersonEvent(Long personObjectId) { + this.personObjectId = personObjectId; + } + + public Long getPersonObjectId() { + return personObjectId; + } + + @Override + public int hashCode() { + int hash = 7; + hash = 67 * hash + Objects.hashCode(this.personObjectId); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final FileSystemPersonEvent other = (FileSystemPersonEvent) obj; + if (!Objects.equals(this.personObjectId, other.personObjectId)) { + return false; + } + return true; + } + + @Override + public DAOEvent.Type getType() { + return DAOEvent.Type.RESULT; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeExtensionsEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeExtensionsEvent.java new file mode 100644 index 0000000000..dadac922b1 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeExtensionsEvent.java @@ -0,0 +1,78 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +import java.util.Objects; + +/** + * An event to signal that files have been added or removed + * with the given extension on the given data source. + */ +public class FileTypeExtensionsEvent implements DAOEvent { + + private final String extension; + private final long dataSourceId; + + public FileTypeExtensionsEvent(String extension, long dataSourceId) { + this.extension = extension; + this.dataSourceId = dataSourceId; + } + + public String getExtension() { + return extension; + } + + public long getDataSourceId() { + return dataSourceId; + } + + @Override + public int hashCode() { + int hash = 7; + hash = 59 * hash + Objects.hashCode(this.extension); + hash = 59 * hash + (int) (this.dataSourceId ^ (this.dataSourceId >>> 32)); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final FileTypeExtensionsEvent other = (FileTypeExtensionsEvent) obj; + if (this.dataSourceId != other.dataSourceId) { + return false; + } + if (!Objects.equals(this.extension, other.extension)) { + return false; + } + return true; + } + + @Override + public Type getType() { + return Type.RESULT; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeMimeEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeMimeEvent.java new file mode 100755 index 0000000000..96b884a432 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeMimeEvent.java @@ -0,0 +1,77 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. 
+ * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +import java.util.Objects; + +/** + * An event pertaining to MIME types view from the DAO. + */ +public class FileTypeMimeEvent implements DAOEvent { + + private final String mimeType; + private final long dataSourceId; + + public FileTypeMimeEvent(String mimeType, long dataSourceId) { + this.mimeType = mimeType; + this.dataSourceId = dataSourceId; + } + + public String getMimeType() { + return mimeType; + } + + public long getDataSourceId() { + return dataSourceId; + } + + @Override + public int hashCode() { + int hash = 7; + hash = 29 * hash + Objects.hashCode(this.mimeType); + hash = 29 * hash + Objects.hashCode(this.dataSourceId); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final FileTypeMimeEvent other = (FileTypeMimeEvent) obj; + if (!Objects.equals(this.mimeType, other.mimeType)) { + return false; + } + if (!Objects.equals(this.dataSourceId, other.dataSourceId)) { + return false; + } + return true; + } + + @Override + public Type getType() { + return Type.RESULT; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeSizeEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeSizeEvent.java new file mode 100755 index 0000000000..eb0f37f8a4 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeSizeEvent.java @@ -0,0 +1,79 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +import java.util.Objects; +import org.sleuthkit.autopsy.mainui.datamodel.FileSizeFilter; + +/** + * An event to signal that files have been added or removed + * within the given size range on the given data source. 
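+ *
+ * <p>For example, a DAO noticing a newly added file that falls into a size
+ * bucket on data source 1 might publish (sketch; the enum constant name is
+ * an assumption about FileSizeFilter):</p>
+ * <pre>{@code
+ * new FileTypeSizeEvent(FileSizeFilter.SIZE_50_200, 1L)
+ * }</pre>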
+ */ +public class FileTypeSizeEvent implements DAOEvent { + + private final FileSizeFilter sizeFilter; + private final Long dataSourceId; + + public FileTypeSizeEvent(FileSizeFilter sizeFilter, Long dataSourceId) { + this.sizeFilter = sizeFilter; + this.dataSourceId = dataSourceId; + } + + public FileSizeFilter getSizeFilter() { + return sizeFilter; + } + + public Long getDataSourceId() { + return dataSourceId; + } + + @Override + public int hashCode() { + int hash = 7; + hash = 53 * hash + Objects.hashCode(this.sizeFilter); + hash = 53 * hash + Objects.hashCode(this.dataSourceId); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final FileTypeSizeEvent other = (FileTypeSizeEvent) obj; + if (this.sizeFilter != other.sizeFilter) { + return false; + } + if (!Objects.equals(this.dataSourceId, other.dataSourceId)) { + return false; + } + return true; + } + + @Override + public Type getType() { + return Type.RESULT; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/KeywordHitEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/KeywordHitEvent.java new file mode 100644 index 0000000000..6141ac7805 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/KeywordHitEvent.java @@ -0,0 +1,45 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +import org.sleuthkit.datamodel.BlackboardArtifact; + +/** + * An event for an artifact added or changed of a particular type possibly for a + * particular data source. + */ +public class KeywordHitEvent extends AnalysisResultSetEvent { + + private final String regex; + private final String match; + + public KeywordHitEvent(String regex, String match, String setName, BlackboardArtifact.Type artifactType, long dataSourceId) { + super(setName, artifactType, dataSourceId); + this.regex = regex; + this.match = match; + } + + public String getRegex() { + return regex; + } + + public String getMatch() { + return match; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/OsAccountEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/OsAccountEvent.java new file mode 100644 index 0000000000..50805b16b8 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/OsAccountEvent.java @@ -0,0 +1,30 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +/** + * An event that OS Accounts were changed. + */ +public class OsAccountEvent implements DAOEvent { + + @Override + public Type getType() { + return Type.RESULT; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TagsEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TagsEvent.java new file mode 100755 index 0000000000..948f96e20d --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TagsEvent.java @@ -0,0 +1,93 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +import java.util.Objects; +import org.sleuthkit.autopsy.mainui.datamodel.TagsSearchParams.TagType; + +/** + * An event to signal that tags have been added or removed on the + * given data source with the given types. + */ +public class TagsEvent implements DAOEvent { + + private final TagType type; + private final Long tagNameId; + private final Long dataSourceId; + + public TagsEvent(TagType type, Long tagNameId, Long dataSourceId) { + this.type = type; + this.tagNameId = tagNameId; + this.dataSourceId = dataSourceId; + } + + public TagType getTagType() { + return type; + } + + public Long getTagNameId() { + return tagNameId; + } + + /** + * @return The data source object id for the tag. Is null if cannot be + * determined. 
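+ *         For example, a file tag added on data source 1 might be
+ *         published as {@code new TagsEvent(TagType.FILE, tagNameId, 1L)}
+ *         (sketch; the FILE constant is an assumption about
+ *         TagsSearchParams.TagType).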
+ */ + public Long getDataSourceId() { + return dataSourceId; + } + + @Override + public int hashCode() { + int hash = 7; + hash = 97 * hash + Objects.hashCode(this.type); + hash = 97 * hash + Objects.hashCode(this.tagNameId); + hash = 97 * hash + Objects.hashCode(this.dataSourceId); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final TagsEvent other = (TagsEvent) obj; + if (this.type != other.type) { + return false; + } + if (!Objects.equals(this.tagNameId, other.tagNameId)) { + return false; + } + if (!Objects.equals(this.dataSourceId, other.dataSourceId)) { + return false; + } + return true; + } + + @Override + public Type getType() { + return Type.RESULT; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeCounts.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeCounts.java new file mode 100644 index 0000000000..edb62d0f01 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeCounts.java @@ -0,0 +1,149 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +/** + * This class is in charge of tracking tree events. When an Autopsy event comes + * in that affects a tree node, the sub-DAOs enqueue the event in this class + * along with the timeout (current time + timeoutMillis). If another Autopsy + * event comes in affecting the same tree category, the timeout is reset. Events + * are not removed from tracking until getEventTimeouts or flushEvents is + * called. The MainDAO has a periodically running task to see if any tree events + * have timed out, and broadcasts those events that have reached timeout. + */ +public class TreeCounts<T> { + + private static final long DEFAULT_TIMEOUT_MILLIS = 2 * 60 * 1000; + + private final Object timeoutLock = new Object(); + private final Map<T, Long> eventTimeouts = new HashMap<>(); + + private final long timeoutMillis; + + /** + * Constructor that uses the default timeout duration. + */ + public TreeCounts() { + this(DEFAULT_TIMEOUT_MILLIS); + } + + /** + * Main constructor. + * + * @param timeoutMillis How long to track an event before it reaches a + * timeout (in milliseconds). + */ + public TreeCounts(long timeoutMillis) { + this.timeoutMillis = timeoutMillis; + } + + /** + * Returns the current time in milliseconds. + * + * @return The current time in milliseconds. + */ + private long getCurTime() { + return System.currentTimeMillis(); + } + + /** + * Returns the timeout time based on the current time. + * + * @return The timeout time (the current time plus the timeout duration).
+ */ + private long getTimeoutTime() { + return getCurTime() + timeoutMillis; + } + + /** + * Adds events to be tracked until they reach timeout. + * + * @param events The events to be tracked. + * + * @return The subset of events that were not already being tracked. + */ + public Collection<T> enqueueAll(Collection<T> events) { + Collection<T> updateToIndeterminate = new ArrayList<>(); + + synchronized (this.timeoutLock) { + for (T event : events) { + this.eventTimeouts.compute(event, (k, v) -> { + if (v == null) { + updateToIndeterminate.add(event); + } + return getTimeoutTime(); + }); + } + } + + return updateToIndeterminate; + } + + /** + * Returns the set of events that are currently being tracked for timeout. + * + * @return The events that are being tracked for timeout. + */ + public Set<T> getEnqueued() { + return new HashSet<>(eventTimeouts.keySet()); + } + + /** + * Returns the events that have reached timeout based on the current time + * stamp and removes them from tracking. + * + * @return The events that have reached timeout. + */ + public Collection<T> getEventTimeouts() { + long curTime = getCurTime(); + List<T> toUpdate; + synchronized (this.timeoutLock) { + toUpdate = this.eventTimeouts.entrySet().stream() + .filter(e -> e.getValue() < curTime) + .map(e -> e.getKey()) + .collect(Collectors.toList()); + + this.eventTimeouts.keySet().removeAll(toUpdate); + } + return toUpdate; + } + + /** + * Returns all currently tracked events regardless of timeout. This method + * removes all events from tracking. + * + * @return All currently tracked events. + */ + public Collection<T> flushEvents() { + synchronized (this.timeoutLock) { + List<T> toRet = new ArrayList<>(eventTimeouts.keySet()); + eventTimeouts.clear(); + return toRet; + } + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEvent.java new file mode 100644 index 0000000000..4fadbbc672 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEvent.java @@ -0,0 +1,85 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +import java.util.Objects; +import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; + +/** + * An event to signal that an item in the tree has been + * added or changed.
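+ *
+ * <p>For example, a sub-DAO that has already computed the updated item can
+ * emit {@code new TreeEvent(itemRecord, false)}, while
+ * {@code new TreeEvent(itemRecord, true)} tells the tree to re-query the DAO
+ * for fresh data ({@code itemRecord} being an existing TreeItemDTO).</p>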
+ */ +public class TreeEvent implements DAOEvent { + + private final TreeItemDTO<?> itemRecord; // the updated item + private final boolean refreshRequired; // true if tree should request new data from DAO + + /** + * @param itemRecord The updated item + * @param refreshRequired True if the tree should go to the DAO for updated data + */ + public TreeEvent(TreeItemDTO<?> itemRecord, boolean refreshRequired) { + this.itemRecord = itemRecord; + this.refreshRequired = refreshRequired; + } + + public TreeItemDTO<?> getItemRecord() { + return itemRecord; + } + + public boolean isRefreshRequired() { + return refreshRequired; + } + + @Override + public int hashCode() { + int hash = 7; + hash = 89 * hash + Objects.hashCode(this.itemRecord); + hash = 89 * hash + (this.refreshRequired ? 1 : 0); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final TreeEvent other = (TreeEvent) obj; + if (this.refreshRequired != other.refreshRequired) { + return false; + } + if (!Objects.equals(this.itemRecord, other.itemRecord)) { + return false; + } + return true; + } + + + + @Override + public Type getType() { + return Type.TREE; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultNode.java index b71f1f0e1c..17bda9720a 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultNode.java @@ -85,8 +85,7 @@ public class AnalysisResultNode extends ArtifactNode getExtractArchiveWithPasswordActionFile() { Optional optionalSourceContent = getSourceContent(); - // GVDTODO: HANDLE THIS ACTION IN A BETTER WAY!----- - // See JIRA-8099 + // TODO: See JIRA-8099 boolean encryptionDetected = false; if(optionalSourceContent.isPresent()) { if (optionalSourceContent.get() instanceof AbstractFile) { diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultTypeFactory.java index e77f88bc5f..6cbee4278d 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultTypeFactory.java @@ -21,27 +21,22 @@ package org.sleuthkit.autopsy.mainui.nodes; import org.sleuthkit.autopsy.mainui.datamodel.KeywordSearchTermParams; import org.sleuthkit.autopsy.mainui.datamodel.KeywordMatchParams; import com.google.common.collect.ImmutableSet; -import java.beans.PropertyChangeEvent; +import java.util.Comparator; import java.util.Set; import java.util.concurrent.ExecutionException; import org.openide.nodes.ChildFactory; import org.openide.nodes.Children; import org.openide.util.NbBundle.Messages; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent; import org.sleuthkit.autopsy.datamodel.utils.IconsUtil; -import org.sleuthkit.autopsy.ingest.IngestManager; -import org.sleuthkit.autopsy.ingest.ModuleDataEvent; -import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultDAO; import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSetSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.KeywordHitSearchParam; import
org.sleuthkit.autopsy.mainui.datamodel.MainDAO; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; import static org.sleuthkit.autopsy.mainui.nodes.TreeNode.getDefaultLookup; import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardArtifact.Category; import org.sleuthkit.datamodel.TskData; /** @@ -49,6 +44,9 @@ import org.sleuthkit.datamodel.TskData; */ public class AnalysisResultTypeFactory extends TreeChildFactory { + private final static Comparator STRING_COMPARATOR = Comparator.nullsFirst(Comparator.naturalOrder()); + + @SuppressWarnings("deprecation") private static Set SET_TREE_ARTIFACTS = ImmutableSet.of( BlackboardArtifact.Type.TSK_HASHSET_HIT.getTypeID(), BlackboardArtifact.Type.TSK_INTERESTING_ARTIFACT_HIT.getTypeID(), @@ -85,9 +83,9 @@ public class AnalysisResultTypeFactory extends TreeChildFactory createNewNode(TreeResultsDTO.TreeItemDTO rowData) { - if (SET_TREE_ARTIFACTS.contains(rowData.getTypeData().getArtifactType().getTypeID())) { - return new TreeTypeNode(rowData, new TreeSetFactory(rowData.getTypeData().getArtifactType(), dataSourceId, null)); - } else if (BlackboardArtifact.Type.TSK_KEYWORD_HIT.equals(rowData.getTypeData().getArtifactType())) { + if (SET_TREE_ARTIFACTS.contains(rowData.getSearchParams().getArtifactType().getTypeID())) { + return new TreeTypeNode(rowData, new TreeSetFactory(rowData.getSearchParams().getArtifactType(), dataSourceId, null)); + } else if (BlackboardArtifact.Type.TSK_KEYWORD_HIT.equals(rowData.getSearchParams().getArtifactType())) { return new TreeTypeNode(rowData, new KeywordSetFactory(dataSourceId)); } else { return new AnalysisResultTypeTreeNode(rowData); @@ -95,71 +93,14 @@ public class AnalysisResultTypeFactory extends TreeChildFactory getOrCreateRelevantChild(TreeEvent daoEvt) { + // GVDTODO + return null; } - /** - * See if expected blackboard type matches event. - * - * @param expectedType The expected artifact type. - * @param evt The event. - * - * @return If the event is a data added event and contains the provided - * type. - */ - private static boolean isRefreshRequired(BlackboardArtifact.Type expectedType, PropertyChangeEvent evt) { - String eventType = evt.getPropertyName(); - if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) { - /** - * This is a stop gap measure until a different way of handling the - * closing of cases is worked out. Currently, remote events may be - * received for a case that is already closed. - */ - try { - Case.getCurrentCaseThrows(); - /** - * Due to some unresolved issues with how cases are closed, it - * is possible for the event to have a null oldValue if the - * event is a remote event. - */ - final ModuleDataEvent event = (ModuleDataEvent) evt.getOldValue(); - // GVDTODO it may be necessary to have more fine-grained check for refresh here. - if (null != event && expectedType.equals(event.getBlackboardArtifactType())) { - return true; - } - } catch (NoCurrentCaseException notUsed) { - /** - * Case is closed, do nothing. 
- */ - } - } - return false; + @Override + public int compare(AnalysisResultSearchParam o1, AnalysisResultSearchParam o2) { + return o1.getArtifactType().getDisplayName().compareTo(o2.getArtifactType().getDisplayName()); } /** @@ -173,14 +114,14 @@ public class AnalysisResultTypeFactory extends TreeChildFactory itemData) { - super(itemData.getTypeData().getArtifactType().getTypeName(), - getIconPath(itemData.getTypeData().getArtifactType()), + super(itemData.getSearchParams().getArtifactType().getTypeName(), + getIconPath(itemData.getSearchParams().getArtifactType()), itemData); } @Override public void respondSelection(DataResultTopComponent dataResultPanel) { - dataResultPanel.displayAnalysisResult(this.getItemData().getTypeData()); + dataResultPanel.displayAnalysisResult(this.getItemData().getSearchParams()); } } @@ -195,8 +136,8 @@ public class AnalysisResultTypeFactory extends TreeChildFactory itemData, ChildFactory childFactory) { - super(itemData.getTypeData().getArtifactType().getTypeName(), - getIconPath(itemData.getTypeData().getArtifactType()), + super(itemData.getSearchParams().getArtifactType().getTypeName(), + getIconPath(itemData.getSearchParams().getArtifactType()), itemData, Children.create(childFactory, true), getDefaultLookup(itemData)); @@ -234,13 +175,19 @@ public class AnalysisResultTypeFactory extends TreeChildFactory createNewNode(TreeResultsDTO.TreeItemDTO rowData) { + return new TreeSetTypeNode(rowData); } @Override - protected TreeNode createNewNode(TreeResultsDTO.TreeItemDTO rowData) { - return new TreeSetTypeNode(rowData); + protected TreeResultsDTO.TreeItemDTO getOrCreateRelevantChild(TreeEvent daoEvt) { + // GVDTODO + return null; + } + + @Override + public int compare(AnalysisResultSetSearchParam o1, AnalysisResultSetSearchParam o2) { + return STRING_COMPARATOR.compare(o1.getSetName(), o2.getSetName()); } } @@ -252,11 +199,11 @@ public class AnalysisResultTypeFactory extends TreeChildFactory itemData) { - super(itemData.getTypeData().getArtifactType().getTypeName(), - getIconPath(itemData.getTypeData().getArtifactType()), + super(itemData.getSearchParams().getArtifactType().getTypeName(), + getIconPath(itemData.getSearchParams().getArtifactType()), itemData, Children.LEAF, getDefaultLookup(itemData)); @@ -264,7 +211,7 @@ public class AnalysisResultTypeFactory extends TreeChildFactory itemData) { - super(itemData.getTypeData().getArtifactType().getTypeName(), - getIconPath(itemData.getTypeData().getArtifactType()), + super(itemData.getSearchParams().getArtifactType().getTypeName(), + getIconPath(itemData.getSearchParams().getArtifactType()), itemData, - Children.create(new KeywordSearchTermFactory(itemData.getTypeData()), true), + Children.create(new KeywordSearchTermFactory(itemData.getSearchParams()), true), getDefaultLookup(itemData)); - } + } } /** @@ -330,8 +277,14 @@ public class AnalysisResultTypeFactory extends TreeChildFactory getOrCreateRelevantChild(TreeEvent daoEvt) { + // GVDTODO + return null; + } + + @Override + public int compare(KeywordSearchTermParams o1, KeywordSearchTermParams o2) { + return STRING_COMPARATOR.compare(o1.getSearchTerm(), o2.getSearchTerm()); } } @@ -347,19 +300,19 @@ public class AnalysisResultTypeFactory extends TreeChildFactory itemData) { - super(itemData.getTypeData().getSearchTerm(), + super(itemData.getSearchParams().getSearchTerm(), getIconPath(BlackboardArtifact.Type.TSK_KEYWORD_HIT), itemData, - (itemData.getTypeData().hasChildren() || itemData.getTypeData().getSearchType() == TskData.KeywordSearchQueryType.REGEX + 
(itemData.getSearchParams().hasChildren() || itemData.getSearchParams().getSearchType() == TskData.KeywordSearchQueryType.REGEX // for regex queries always create a subtree, even if there is only one child - ? Children.create(new KeywordFoundMatchFactory(itemData.getTypeData()), true) + ? Children.create(new KeywordFoundMatchFactory(itemData.getSearchParams()), true) : Children.LEAF), getDefaultLookup(itemData)); } @Override public void respondSelection(DataResultTopComponent dataResultPanel) { - KeywordSearchTermParams searchTermParams = this.getItemData().getTypeData(); + KeywordSearchTermParams searchTermParams = this.getItemData().getSearchParams(); if (!searchTermParams.hasChildren()) { KeywordHitSearchParam searchParams = new KeywordHitSearchParam(searchTermParams.getDataSourceId(), @@ -407,8 +360,14 @@ public class AnalysisResultTypeFactory extends TreeChildFactory getOrCreateRelevantChild(TreeEvent daoEvt) { + // GVDTODO + return null; + } + + @Override + public int compare(KeywordMatchParams o1, KeywordMatchParams o2) { + return STRING_COMPARATOR.compare(o1.getKeywordMatch(), o2.getKeywordMatch()); } } @@ -424,7 +383,7 @@ public class AnalysisResultTypeFactory extends TreeChildFactory itemData) { - super(itemData.getTypeData().getKeywordMatch(), + super(itemData.getSearchParams().getKeywordMatch(), getIconPath(BlackboardArtifact.Type.TSK_KEYWORD_HIT), itemData, Children.LEAF, @@ -433,7 +392,7 @@ public class AnalysisResultTypeFactory extends TreeChildFactory { * * @param pageSize The number of items per page. * @param pageIdx The page index. - * @param hardRefresh Whether or not to perform a hard refresh. * * @return The retrieved data. * * @throws ExecutionException */ - public abstract SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException; + public abstract SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException; /** * Returns true if the ingest module event will require a refresh in the @@ -74,49 +70,5 @@ public abstract class DAOFetcher
<P>
{ * * @return True if the */ - public abstract boolean isRefreshRequired(PropertyChangeEvent evt); - - /** - * Returns the content from the ModuleContentEvent. If the event does not - * contain a ModuleContentEvent or the event does not contain Content, null - * is returned. - * - * @param evt The event - * - * @return The inner content or null if no content. - */ - protected Content getContentFromEvt(PropertyChangeEvent evt) { - String eventName = evt.getPropertyName(); - if (IngestManager.IngestModuleEvent.CONTENT_CHANGED.toString().equals(eventName) - && (evt.getOldValue() instanceof ModuleContentEvent) - && ((ModuleContentEvent) evt.getOldValue()).getSource() instanceof Content) { - - return (Content) ((ModuleContentEvent) evt.getOldValue()).getSource(); - - } else if (IngestManager.IngestModuleEvent.FILE_DONE.toString().equals(eventName) - && (evt.getNewValue() instanceof Content)) { - return (Content) evt.getNewValue(); - } else { - return null; - } - } - - /** - * Returns the ModuleDataEvent in the event if there is a child - * ModuleDataEvent. If not, null is returned. - * - * @param evt The event. - * - * @return The inner ModuleDataEvent or null. - */ - protected ModuleDataEvent getModuleDataFromEvt(PropertyChangeEvent evt) { - String eventName = evt.getPropertyName(); - if (IngestManager.IngestModuleEvent.DATA_ADDED.toString().equals(eventName) - && (evt.getOldValue() instanceof ModuleDataEvent)) { - - return (ModuleDataEvent) evt.getOldValue(); - } else { - return null; - } - } + public abstract boolean isRefreshRequired(DAOEvent evt); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java index 50ef7cb987..4ecbe55a08 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java @@ -18,26 +18,28 @@ */ package org.sleuthkit.autopsy.mainui.nodes; -import java.beans.PropertyChangeEvent; +import java.util.Objects; import java.util.concurrent.ExecutionException; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; +import org.openide.nodes.Children; +import org.openide.util.NbBundle.Messages; +import org.sleuthkit.autopsy.mainui.datamodel.CommAccountsSearchParams; import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent; +import org.sleuthkit.autopsy.datamodel.accounts.Accounts; import org.sleuthkit.autopsy.datamodel.utils.IconsUtil; -import org.sleuthkit.autopsy.ingest.IngestManager; -import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactDAO; import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.MainDAO; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO; +import static org.sleuthkit.autopsy.mainui.nodes.TreeNode.getDefaultLookup; +import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardArtifact.Category; /** * Factory for displaying data artifact types in the tree. 
*/ public class DataArtifactTypeFactory extends TreeChildFactory { - + private final Long dataSourceId; /** @@ -49,65 +51,185 @@ public class DataArtifactTypeFactory extends TreeChildFactory getChildResults() throws IllegalArgumentException, ExecutionException { return MainDAO.getInstance().getDataArtifactsDAO().getDataArtifactCounts(dataSourceId); } - + @Override protected TreeNode createNewNode(TreeResultsDTO.TreeItemDTO rowData) { - return new DataArtifactTypeTreeNode(rowData); - } - - @Override - public boolean isRefreshRequired(PropertyChangeEvent evt) { - String eventType = evt.getPropertyName(); - if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) { - /** - * This is a stop gap measure until a different way of handling the - * closing of cases is worked out. Currently, remote events may be - * received for a case that is already closed. - */ - try { - Case.getCurrentCaseThrows(); - /** - * Due to some unresolved issues with how cases are closed, it - * is possible for the event to have a null oldValue if the - * event is a remote event. - */ - final ModuleDataEvent event = (ModuleDataEvent) evt.getOldValue(); - if (null != event && Category.DATA_ARTIFACT.equals(event.getBlackboardArtifactType().getCategory()) - && !(DataArtifactDAO.getIgnoredTreeTypes().contains(event.getBlackboardArtifactType()))) { - return true; - } - } catch (NoCurrentCaseException notUsed) { - /** - * Case is closed, do nothing. - */ - } + if (rowData.getSearchParams().getArtifactType().getTypeID() == BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID()) { + return new AccountTypeParentNode(rowData, this.dataSourceId); + } else { + return new DataArtifactTypeTreeNode(rowData); } - return false; + } + + @Override + protected TreeItemDTO getOrCreateRelevantChild(TreeEvent treeEvt) { + + TreeItemDTO originalTreeItem = super.getTypedTreeItem(treeEvt, DataArtifactSearchParam.class); + + if (originalTreeItem != null + && !DataArtifactDAO.getIgnoredTreeTypes().contains(originalTreeItem.getSearchParams().getArtifactType()) + && (this.dataSourceId == null || Objects.equals(this.dataSourceId, originalTreeItem.getSearchParams().getDataSourceId()))) { + + DataArtifactSearchParam searchParam = originalTreeItem.getSearchParams(); + return new TreeItemDTO<>( + BlackboardArtifact.Category.DATA_ARTIFACT.name(), + new DataArtifactSearchParam(searchParam.getArtifactType(), this.dataSourceId), + searchParam.getArtifactType().getTypeID(), + MainDAO.getInstance().getDataArtifactsDAO().getDisplayName(searchParam.getArtifactType()), + originalTreeItem.getDisplayCount()); + } + return null; + } + + @Override + public int compare(DataArtifactSearchParam o1, DataArtifactSearchParam o2) { + DataArtifactDAO dao = MainDAO.getInstance().getDataArtifactsDAO(); + return dao.getDisplayName(o1.getArtifactType()).compareToIgnoreCase(dao.getDisplayName(o2.getArtifactType())); + } + + private static String getIconPath(BlackboardArtifact.Type artType) { + String iconPath = IconsUtil.getIconFilePath(artType.getTypeID()); + return iconPath != null && iconPath.charAt(0) == '/' ? iconPath.substring(1) : iconPath; } /** * Display name and count of a data artifact type in the tree. */ public static class DataArtifactTypeTreeNode extends TreeNode { - - private static String getIconPath(BlackboardArtifact.Type artType) { - String iconPath = IconsUtil.getIconFilePath(artType.getTypeID()); - return iconPath != null && iconPath.charAt(0) == '/' ? 
iconPath.substring(1) : iconPath;
- }
-
+
public DataArtifactTypeTreeNode(TreeResultsDTO.TreeItemDTO<DataArtifactSearchParam> itemData) {
- super(itemData.getTypeData().getArtifactType().getTypeName(),
- getIconPath(itemData.getTypeData().getArtifactType()),
+ super(itemData.getSearchParams().getArtifactType().getTypeName(),
+ getIconPath(itemData.getSearchParams().getArtifactType()),
itemData);
}
-
+ @Override
public void respondSelection(DataResultTopComponent dataResultPanel) {
- dataResultPanel.displayDataArtifact(this.getItemData().getTypeData());
+ dataResultPanel.displayDataArtifact(this.getItemData().getSearchParams());
+ }
+ }
+
+ /**
+ * The account node that has nested children of account types.
+ */
+ @Messages({
+ "DataArtifactTypeFactory_AccountTypeParentNode_displayName=Communication Accounts"
+ })
+ static class AccountTypeParentNode extends TreeNode<DataArtifactSearchParam> {
+
+ /**
+ * Sets correct title (not using artifact type display name).
+ *
+ * @param itemData The item data.
+ *
+ * @return The updated data.
+ */
+ private static TreeItemDTO<DataArtifactSearchParam> createTitledData(TreeResultsDTO.TreeItemDTO<DataArtifactSearchParam> itemData) {
+ return new TreeItemDTO<>(
+ itemData.getTypeId(),
+ itemData.getSearchParams(),
+ itemData.getId(),
+ Bundle.DataArtifactTypeFactory_AccountTypeParentNode_displayName(),
+ itemData.getDisplayCount()
+ );
+ }
+
+ /**
+ * Main constructor.
+ *
+ * @param itemData The data to display.
+ * @param dataSourceId The data source id to filter on or null if no
+ * data source filter.
+ */
+ public AccountTypeParentNode(TreeResultsDTO.TreeItemDTO<DataArtifactSearchParam> itemData, Long dataSourceId) {
+ super(itemData.getSearchParams().getArtifactType().getTypeName(),
+ getIconPath(itemData.getSearchParams().getArtifactType()),
+ createTitledData(itemData),
+ Children.create(new AccountTypeFactory(dataSourceId), true),
+ getDefaultLookup(itemData)
+ );
+ }
+
+ @Override
+ protected void updateDisplayName(TreeItemDTO<DataArtifactSearchParam> prevData, TreeItemDTO<DataArtifactSearchParam> curData) {
+ super.updateDisplayName(prevData, createTitledData(curData));
+ }
+
+ }
+
+ /**
+ * Factory for displaying account types.
+ */
+ static class AccountTypeFactory extends TreeChildFactory<CommAccountsSearchParams> {
+
+ private final Long dataSourceId;
+
+ /**
+ * Main constructor.
+ *
+ * @param dataSourceId The data source object id for which the results
+ * should be filtered or null if no data source
+ * filtering.
+ */ + public AccountTypeFactory(Long dataSourceId) { + this.dataSourceId = dataSourceId; + } + + @Override + protected TreeResultsDTO getChildResults() throws IllegalArgumentException, ExecutionException { + return MainDAO.getInstance().getCommAccountsDAO().getAccountsCounts(this.dataSourceId); + } + + @Override + protected TreeNode createNewNode(TreeResultsDTO.TreeItemDTO rowData) { + return new AccountTypeNode(rowData); + } + + @Override + protected TreeItemDTO getOrCreateRelevantChild(TreeEvent treeEvt) { + + TreeItemDTO originalTreeItem = getTypedTreeItem(treeEvt, CommAccountsSearchParams.class); + + if (originalTreeItem != null + && (this.dataSourceId == null || Objects.equals(this.dataSourceId, originalTreeItem.getSearchParams().getDataSourceId()))) { + CommAccountsSearchParams searchParam = originalTreeItem.getSearchParams(); + return TreeChildFactory.createTreeItemDTO(originalTreeItem, + new CommAccountsSearchParams(searchParam.getType(), this.dataSourceId)); + } + + return null; + } + + @Override + public int compare(CommAccountsSearchParams o1, CommAccountsSearchParams o2) { + return o1.getType().getDisplayName().compareToIgnoreCase(o2.getType().getDisplayName()); + } + } + + /** + * A node representing a single account type in the tree. + */ + static class AccountTypeNode extends TreeNode { + + /** + * Main constructor. + * + * @param itemData The data to display. + */ + public AccountTypeNode(TreeResultsDTO.TreeItemDTO itemData) { + super(itemData.getSearchParams().getType().getTypeName(), + Accounts.getIconFilePath(itemData.getSearchParams().getType()), + itemData); + } + + @Override + public void respondSelection(DataResultTopComponent dataResultPanel) { + dataResultPanel.displayAccounts(super.getItemData().getSearchParams()); } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DirectoryNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DirectoryNode.java index 1c950c7e34..8b101d9244 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DirectoryNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DirectoryNode.java @@ -39,6 +39,7 @@ public class DirectoryNode extends BaseNode { */ public DirectoryNode(SearchResultsDTO results, DirectoryRowDTO row) { super(Children.LEAF, ContentNodeUtil.getLookup(row.getContent()), results, row); + setName(ContentNodeUtil.getContentName(row.getContent().getId())); setDisplayName(row.getContent().getName()); setShortDescription(row.getContent().getName()); setIcon(); @@ -52,9 +53,9 @@ public class DirectoryNode extends BaseNode { private void setIcon() { // set name, display name, and icon if (getRowDTO().getContent().isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC)) { - this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/folder-icon-deleted.png"); //NON-NLS + this.setIconBaseWithExtension(NodeIconUtil.DELETED_FOLDER.getPath()); //NON-NLS } else { - this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/Folder-icon.png"); //NON-NLS + this.setIconBaseWithExtension(NodeIconUtil.FOLDER.getPath()); //NON-NLS } } @@ -69,7 +70,7 @@ public class DirectoryNode extends BaseNode { } @Override - public boolean supportsExtractActions() { + public boolean supportsTableExtractActions() { return true; } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileNode.java index 292a86a381..790b75d485 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileNode.java @@ -29,7 +29,7 @@ 
import org.sleuthkit.autopsy.datamodel.FileTypeExtensions; import org.sleuthkit.autopsy.mainui.datamodel.SearchResultsDTO; import org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO; import org.sleuthkit.autopsy.mainui.datamodel.ColumnKey; -import org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO.ExtensionMediaType; +import org.sleuthkit.autopsy.mainui.datamodel.MediaTypeUtils; import org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO.LayoutFileRowDTO; import org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO.SlackFileRowDTO; import org.sleuthkit.autopsy.mainui.nodes.actions.ActionContext; @@ -47,44 +47,6 @@ import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM; */ public class FileNode extends AbstractNode implements ActionContext { - /** - * Gets the path to the icon file that should be used to visually represent - * an AbstractFile, using the file name extension to select the icon. - * - * @param file An AbstractFile. - * - * @return An icon file path. - */ - static String getIconForFileType(ExtensionMediaType fileType) { - if (fileType == null) { - return "org/sleuthkit/autopsy/images/file-icon.png"; - } - - switch (fileType) { - case IMAGE: - return "org/sleuthkit/autopsy/images/image-file.png"; - case VIDEO: - return "org/sleuthkit/autopsy/images/video-file.png"; - case AUDIO: - return "org/sleuthkit/autopsy/images/audio-file.png"; - case DOC: - return "org/sleuthkit/autopsy/images/doc-file.png"; - case EXECUTABLE: - return "org/sleuthkit/autopsy/images/exe-file.png"; - case TEXT: - return "org/sleuthkit/autopsy/images/text-file.png"; - case WEB: - return "org/sleuthkit/autopsy/images/web-file.png"; - case PDF: - return "org/sleuthkit/autopsy/images/pdf-file.png"; - case ARCHIVE: - return "org/sleuthkit/autopsy/images/archive-file.png"; - default: - case UNCATEGORIZED: - return "org/sleuthkit/autopsy/images/file-icon.png"; - } - } - private final boolean directoryBrowseMode; private final FileRowDTO fileData; private final List columns; @@ -97,8 +59,8 @@ public class FileNode extends AbstractNode implements ActionContext { // GVDTODO: at some point, this leaf will need to allow for children super(Children.LEAF, ContentNodeUtil.getLookup(file.getAbstractFile())); setIcon(file); - setDisplayName(ContentNodeUtil.getContentDisplayName(file.getFileName())); setName(ContentNodeUtil.getContentName(file.getId())); + setDisplayName(ContentNodeUtil.getContentDisplayName(file.getFileName())); setShortDescription(ContentNodeUtil.getContentDisplayName(file.getFileName())); this.directoryBrowseMode = directoryBrowseMode; this.fileData = file; @@ -116,7 +78,7 @@ public class FileNode extends AbstractNode implements ActionContext { this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/file-icon-deleted.png"); //NON-NLS } } else { - this.setIconBaseWithExtension(getIconForFileType(fileData.getExtensionMediaType())); + this.setIconBaseWithExtension(MediaTypeUtils.getIconForFileType(fileData.getExtensionMediaType())); } } @@ -146,7 +108,7 @@ public class FileNode extends AbstractNode implements ActionContext { } @Override - public boolean supportsExtractActions() { + public boolean supportsTableExtractActions() { return true; } @@ -166,8 +128,7 @@ public class FileNode extends AbstractNode implements ActionContext { @Override public Optional getExtractArchiveWithPasswordActionFile() { - // GVDTODO: HANDLE THIS ACTION IN A BETTER WAY!----- - // See JIRA-8099 + // TODO: See JIRA-8099 AbstractFile file = this.fileData.getAbstractFile(); boolean isArchive = 
FileTypeExtensions.getArchiveExtensions().contains("." + file.getNameExtension().toLowerCase()); boolean encryptionDetected = false; @@ -207,17 +168,17 @@ public class FileNode extends AbstractNode implements ActionContext { LayoutFile lf = ((LayoutFileRowDTO) fileData).getLayoutFile(); switch (lf.getType()) { case CARVED: - setIconBaseWithExtension("org/sleuthkit/autopsy/images/carved-file-x-icon-16.png"); + setIconBaseWithExtension(NodeIconUtil.CARVED_FILE.getPath()); break; case LAYOUT_FILE: if (lf.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC)) { - setIconBaseWithExtension("org/sleuthkit/autopsy/images/file-icon-deleted.png"); + setIconBaseWithExtension(NodeIconUtil.DELETED_FILE.getPath()); } else { - setIconBaseWithExtension(getIconForFileType(layoutFileRow.getExtensionMediaType())); + setIconBaseWithExtension(MediaTypeUtils.getIconForFileType(layoutFileRow.getExtensionMediaType())); } break; default: - setIconBaseWithExtension("org/sleuthkit/autopsy/images/file-icon-deleted.png"); + setIconBaseWithExtension(NodeIconUtil.DELETED_FILE.getPath()); } } } @@ -236,12 +197,12 @@ public class FileNode extends AbstractNode implements ActionContext { AbstractFile file = fileData.getAbstractFile(); if (file.isDirNameFlagSet(TSK_FS_NAME_FLAG_ENUM.UNALLOC)) { if (file.getType().equals(TSK_DB_FILES_TYPE_ENUM.CARVED)) { - this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/carved-file-x-icon-16.png"); //NON-NLS + this.setIconBaseWithExtension(NodeIconUtil.CARVED_FILE.getPath()); //NON-NLS } else { - this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/file-icon-deleted.png"); //NON-NLS + this.setIconBaseWithExtension(NodeIconUtil.DELETED_FILE.getPath()); //NON-NLS } } else { - this.setIconBaseWithExtension(getIconForFileType(fileData.getExtensionMediaType())); + this.setIconBaseWithExtension(MediaTypeUtils.getIconForFileType(fileData.getExtensionMediaType())); } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileSystemFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileSystemFactory.java new file mode 100644 index 0000000000..1223132df5 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileSystemFactory.java @@ -0,0 +1,563 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.mainui.nodes; + +import java.util.Optional; +import org.openide.nodes.Children; +import org.openide.nodes.Node; +import java.util.concurrent.ExecutionException; +import java.util.logging.Level; +import javax.swing.Action; +import org.openide.util.Lookup; +import org.openide.util.NbBundle; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; +import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.datamodel.FileTypeExtensions; +import org.sleuthkit.autopsy.directorytree.ExtractUnallocAction; +import org.sleuthkit.autopsy.directorytree.FileSystemDetailsAction; +import org.sleuthkit.autopsy.mainui.datamodel.FileSystemContentSearchParam; +import org.sleuthkit.autopsy.mainui.datamodel.FileSystemColumnUtils; +import org.sleuthkit.autopsy.mainui.datamodel.MediaTypeUtils; +import org.sleuthkit.autopsy.mainui.datamodel.MainDAO; +import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; +import static org.sleuthkit.autopsy.mainui.nodes.NodeIconUtil.CARVED_FILE; +import static org.sleuthkit.autopsy.mainui.nodes.NodeIconUtil.DELETED_FILE; +import static org.sleuthkit.autopsy.mainui.nodes.NodeIconUtil.DELETED_FOLDER; +import static org.sleuthkit.autopsy.mainui.nodes.NodeIconUtil.FOLDER; +import static org.sleuthkit.autopsy.mainui.nodes.TreeNode.getDefaultLookup; +import org.sleuthkit.autopsy.mainui.nodes.actions.ActionContext; +import org.sleuthkit.autopsy.mainui.nodes.actions.ActionsFactory; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.DataSource; +import org.sleuthkit.datamodel.Host; +import org.sleuthkit.datamodel.Image; +import org.sleuthkit.datamodel.LocalDirectory; +import org.sleuthkit.datamodel.LocalFilesDataSource; +import org.sleuthkit.datamodel.Pool; +import org.sleuthkit.datamodel.TskCoreException; +import org.sleuthkit.datamodel.TskDataException; +import org.sleuthkit.datamodel.TskData; +import org.sleuthkit.datamodel.VirtualDirectory; +import org.sleuthkit.datamodel.Volume; + +/** + * Factory for displaying content in the data source section of the tree. + */ +public class FileSystemFactory extends TreeChildFactory { + + private static final Logger logger = Logger.getLogger(FileSystemFactory.class.getName()); + + private Long contentId = null; + private Host host = null; + + /** + * Create a factory for a given parent content ID. + * + * @param contentId The object ID for this node + */ + public FileSystemFactory(Long contentId) { + this.contentId = contentId; + } + + /** + * Create a factory for a given parent Host. 
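+ * The resulting children are the data sources associated with that host
+ * (see getDataSourcesForHost below).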
+ *
+ * @param host The parent host for this node
+ */
+ public FileSystemFactory(Host host) {
+ this.host = host;
+ }
+
+ @Override
+ protected TreeResultsDTO<FileSystemContentSearchParam> getChildResults() throws IllegalArgumentException, ExecutionException {
+ if (host == null) {
+ TreeResultsDTO<FileSystemContentSearchParam> results = MainDAO.getInstance().getFileSystemDAO().getDisplayableContentChildren(contentId);
+ return results;
+ } else {
+ TreeResultsDTO<FileSystemContentSearchParam> results = MainDAO.getInstance().getFileSystemDAO().getDataSourcesForHost(host);
+ return results;
+ }
+ }
+
+ @Override
+ protected TreeNode<FileSystemContentSearchParam> createNewNode(TreeResultsDTO.TreeItemDTO<FileSystemContentSearchParam> rowData) {
+ try {
+ Content content = Case.getCurrentCaseThrows().getSleuthkitCase().getContentById(rowData.getSearchParams().getContentObjectId());
+ if (content instanceof Image) {
+ return new ImageTreeNode((Image) content, rowData);
+ } else if (content instanceof Volume) {
+ return new VolumeTreeNode((Volume) content, rowData);
+ } else if (content instanceof Pool) {
+ return new PoolTreeNode((Pool) content, rowData);
+ } else if (content instanceof LocalFilesDataSource) {
+ return new LocalFilesDataSourceTreeNode((LocalFilesDataSource) content, rowData);
+ } else if (content instanceof LocalDirectory) {
+ return new LocalDirectoryTreeNode((LocalDirectory) content, rowData);
+ } else if (content instanceof VirtualDirectory) {
+ return new VirtualDirectoryTreeNode((VirtualDirectory) content, rowData);
+ } else if (content instanceof AbstractFile) {
+ AbstractFile file = (AbstractFile) content;
+ if (file.isDir()) {
+ return new DirectoryTreeNode(file, rowData);
+ } else {
+ return new FileTreeNode(file, rowData);
+ }
+ } else {
+ return new UnsupportedTreeNode(content, rowData);
+ }
+ } catch (NoCurrentCaseException ex) {
+ // Case was likely closed while nodes were being created - don't fill the log with errors.
+ return null;
+ } catch (TskCoreException ex) {
+ logger.log(Level.SEVERE, "Error creating new node for content with ID: " + rowData.getSearchParams().getContentObjectId(), ex);
+ return null;
+ }
+ }
+
+ @Override
+ protected TreeResultsDTO.TreeItemDTO<FileSystemContentSearchParam> getOrCreateRelevantChild(TreeEvent treeEvt) {
+ // GVDTODO
+ return null;
+ }
+
+ @Override
+ public int compare(FileSystemContentSearchParam o1, FileSystemContentSearchParam o2) {
+ // GVDTODO
+ return 0;
+ }
+
+ /**
+ * This factory is used to produce the single data source node under "Data
+ * Source Files" when grouping by person/host is selected.
+ */
+ public static class DataSourceFactory extends TreeChildFactory<FileSystemContentSearchParam> {
+
+ private final long dataSourceId;
+
+ /**
+ * Create the factory for a given data source object ID.
+ *
+ * @param dataSourceId The data source object ID.
+ */
+ public DataSourceFactory(long dataSourceId) {
+ this.dataSourceId = dataSourceId;
+ }
+
+ @Override
+ protected TreeResultsDTO<FileSystemContentSearchParam> getChildResults() throws IllegalArgumentException, ExecutionException {
+ // We're not really getting children here, just creating a node for the data source itself.
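+ // getSingleDataSource(dataSourceId) is expected to return a one-item
+ // TreeResultsDTO, so createKeys() yields exactly one node for this factory.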
+ return MainDAO.getInstance().getFileSystemDAO().getSingleDataSource(dataSourceId); + } + + @Override + protected TreeNode createNewNode(TreeResultsDTO.TreeItemDTO rowData) { + try { + DataSource ds = Case.getCurrentCaseThrows().getSleuthkitCase().getDataSource(dataSourceId); + if (ds instanceof Image) { + return new ImageTreeNode((Image) ds, rowData); + } else if (ds instanceof LocalFilesDataSource) { + return new LocalFilesDataSourceTreeNode((LocalFilesDataSource) ds, rowData); + } else { + logger.log(Level.SEVERE, "Unexpected data source type (ID: {0})", dataSourceId); + return null; + } + } catch (NoCurrentCaseException ex) { + // Case is likely closing + return null; + } catch (TskCoreException | TskDataException ex) { + logger.log(Level.SEVERE, "Error creating node from data source with ID: " + dataSourceId, ex); + return null; + } + } + + @Override + protected TreeResultsDTO.TreeItemDTO getOrCreateRelevantChild(TreeEvent treeEvt) { + // GVDTODO + return null; + } + + @Override + public int compare(FileSystemContentSearchParam o1, FileSystemContentSearchParam o2) { + // GVDTODO + return 0; + } + } + + /** + * Display name and count of a file system node in the tree. + */ + @NbBundle.Messages({ + "FileSystemFactory.FileSystemTreeNode.ExtractUnallocAction.text=Extract Unallocated Space to Single Files"}) + public abstract static class FileSystemTreeNode extends TreeNode implements ActionContext { + + protected FileSystemTreeNode(String icon, TreeResultsDTO.TreeItemDTO itemData, Children children, Lookup lookup) { + super(ContentNodeUtil.getContentName(itemData.getSearchParams().getContentObjectId()), icon, itemData, children, lookup); + } + + protected static Children createChildrenForContent(Long contentId) { + try { + if (FileSystemColumnUtils.getVisibleTreeNodeChildren(contentId).isEmpty()) { + return Children.LEAF; + } else { + return Children.create(new FileSystemFactory(contentId), true); + } + } catch (TskCoreException ex) { + logger.log(Level.WARNING, "Error creating children for content with ID: " + contentId, ex); + return Children.LEAF; + } catch (NoCurrentCaseException ex) { + return Children.LEAF; + } + } + + @Override + public void respondSelection(DataResultTopComponent dataResultPanel) { + dataResultPanel.displayFileSystemContent(this.getItemData().getSearchParams()); + } + + public abstract Node clone(); + + @Override + public Action[] getActions(boolean context) { + return ActionsFactory.getActions(this); + } + } + + static class ImageTreeNode extends FileSystemTreeNode { + + Image image; + + ImageTreeNode(Image image, TreeResultsDTO.TreeItemDTO itemData) { + super(NodeIconUtil.IMAGE.getPath(), + itemData, + createChildrenForContent(itemData.getSearchParams().getContentObjectId()), + ContentNodeUtil.getLookup(image)); + this.image = image; + } + + public Node clone() { + return new ImageTreeNode(image, getItemData()); + } + + @Override + public Optional getNodeSpecificActions() { + ActionsFactory.ActionGroup group = new ActionsFactory.ActionGroup(); + group.add(new ExtractUnallocAction( + Bundle.FileSystemFactory_FileSystemTreeNode_ExtractUnallocAction_text(), image)); + return Optional.of(group); + } + + @Override + public Optional getDataSourceForActions() { + return Optional.of(image); + } + + @Override + public boolean supportsSourceContentViewerActions() { + return true; + } + } + + static class VolumeTreeNode extends FileSystemTreeNode { + + Volume volume; + + VolumeTreeNode(Volume volume, TreeResultsDTO.TreeItemDTO itemData) { + 
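+ // Children are built lazily from the DAO via createChildrenForContent().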
super(NodeIconUtil.VOLUME.getPath(),
+ itemData,
+ createChildrenForContent(itemData.getSearchParams().getContentObjectId()),
+ ContentNodeUtil.getLookup(volume));
+ this.volume = volume;
+ }
+
+ public Node clone() {
+ return new VolumeTreeNode(volume, getItemData());
+ }
+
+ @Override
+ public Optional<ActionsFactory.ActionGroup> getNodeSpecificActions() {
+ ActionsFactory.ActionGroup group = new ActionsFactory.ActionGroup();
+ group.add(new ExtractUnallocAction(
+ Bundle.FileSystemFactory_FileSystemTreeNode_ExtractUnallocAction_text(), volume));
+ group.add(new FileSystemDetailsAction(volume));
+ return Optional.of(group);
+ }
+
+ @Override
+ public boolean supportsSourceContentViewerActions() {
+ return true;
+ }
+ }
+
+ static class PoolTreeNode extends FileSystemTreeNode {
+
+ Pool pool;
+
+ PoolTreeNode(Pool pool, TreeResultsDTO.TreeItemDTO<FileSystemContentSearchParam> itemData) {
+ super(NodeIconUtil.POOL.getPath(),
+ itemData,
+ createChildrenForContent(itemData.getSearchParams().getContentObjectId()),
+ ContentNodeUtil.getLookup(pool));
+ this.pool = pool;
+ }
+
+ public Node clone() {
+ return new PoolTreeNode(pool, getItemData());
+ }
+ }
+
+ static class DirectoryTreeNode extends FileSystemTreeNode {
+
+ AbstractFile dir;
+
+ DirectoryTreeNode(AbstractFile dir, TreeResultsDTO.TreeItemDTO<FileSystemContentSearchParam> itemData) {
+ super(getDirectoryIcon(dir),
+ itemData,
+ createChildrenForContent(itemData.getSearchParams().getContentObjectId()),
+ ContentNodeUtil.getLookup(dir));
+ this.dir = dir;
+ }
+
+ private static String getDirectoryIcon(AbstractFile dir) {
+ if (dir.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC)) {
+ return DELETED_FOLDER.getPath();
+ } else {
+ return FOLDER.getPath();
+ }
+ }
+
+ public Node clone() {
+ return new DirectoryTreeNode(dir, getItemData());
+ }
+
+ @Override
+ public boolean supportsViewInTimeline() {
+ return true;
+ }
+
+ @Override
+ public Optional<AbstractFile> getFileForViewInTimelineAction() {
+ return Optional.of(dir);
+ }
+
+ @Override
+ public boolean supportsTreeExtractActions() {
+ return true;
+ }
+
+ @Override
+ public Optional<Content> getContentForRunIngestionModuleAction() {
+ return Optional.of(dir);
+ }
+
+ @Override
+ public boolean supportsContentTagAction() {
+ return true;
+ }
+ }
+
+ static abstract class SpecialDirectoryTreeNode extends FileSystemTreeNode {
+
+ AbstractFile dir;
+
+ protected SpecialDirectoryTreeNode(AbstractFile dir, String icon, TreeResultsDTO.TreeItemDTO<FileSystemContentSearchParam> itemData, Children children, Lookup lookup) {
+ super(icon, itemData, children, lookup);
+ this.dir = dir;
+ }
+
+ @Override
+ public boolean supportsSourceContentViewerActions() {
+ return true;
+ }
+
+ @Override
+ public boolean supportsTreeExtractActions() {
+ return true;
+ }
+
+ @Override
+ public Optional<Content> getContentForRunIngestionModuleAction() {
+ return Optional.of(dir);
+ }
+ }
+
+ static class LocalDirectoryTreeNode extends SpecialDirectoryTreeNode {
+
+ LocalDirectoryTreeNode(AbstractFile dir, TreeResultsDTO.TreeItemDTO<FileSystemContentSearchParam> itemData) {
+ super(dir,
+ NodeIconUtil.FOLDER.getPath(),
+ itemData,
+ createChildrenForContent(itemData.getSearchParams().getContentObjectId()),
+ ContentNodeUtil.getLookup(dir));
+ }
+
+ public Node clone() {
+ return new LocalDirectoryTreeNode(dir, getItemData());
+ }
+
+ @Override
+ public boolean supportsContentTagAction() {
+ return true;
+ }
+ }
+
+ static class LocalFilesDataSourceTreeNode extends SpecialDirectoryTreeNode {
+
+ LocalFilesDataSourceTreeNode(AbstractFile localFilesDataSource, TreeResultsDTO.TreeItemDTO<FileSystemContentSearchParam> itemData) {
+ super(localFilesDataSource,
+ NodeIconUtil.LOCAL_FILES_DATA_SOURCE.getPath(),
+ itemData,
+
createChildrenForContent(itemData.getSearchParams().getContentObjectId()), + ContentNodeUtil.getLookup(localFilesDataSource)); + } + + public Node clone() { + return new LocalFilesDataSourceTreeNode(dir, getItemData()); + } + + @Override + public Optional getDataSourceForActions() { + return Optional.of(dir); + } + } + + static class VirtualDirectoryTreeNode extends SpecialDirectoryTreeNode { + + VirtualDirectoryTreeNode(AbstractFile dir, TreeResultsDTO.TreeItemDTO itemData) { + super(dir, + NodeIconUtil.VIRTUAL_DIRECTORY.getPath(), + itemData, + createChildrenForContent(itemData.getSearchParams().getContentObjectId()), + ContentNodeUtil.getLookup(dir)); + } + + public Node clone() { + return new VirtualDirectoryTreeNode(dir, getItemData()); + } + } + + static class FileTreeNode extends FileSystemTreeNode { + + AbstractFile file; + + FileTreeNode(AbstractFile file, TreeResultsDTO.TreeItemDTO itemData) { + super(getFileIcon(file), + itemData, + createChildrenForContent(itemData.getSearchParams().getContentObjectId()), + ContentNodeUtil.getLookup(file)); + this.file = file; + } + + public Node clone() { + return new FileTreeNode(file, getItemData()); + } + + private static String getFileIcon(AbstractFile file) { + if (file.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC)) { + if (file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.CARVED)) { + return CARVED_FILE.getPath(); + } else { + return DELETED_FILE.getPath(); + } + } else { + MediaTypeUtils.ExtensionMediaType mediaType = MediaTypeUtils.getExtensionMediaType(file.getNameExtension()); + return MediaTypeUtils.getIconForFileType(mediaType); + } + } + + @Override + public boolean supportsViewInTimeline() { + return true; + } + + @Override + public Optional getFileForViewInTimelineAction() { + return Optional.of(file); + } + + @Override + public boolean supportsSourceContentViewerActions() { + return true; + } + + @Override + public Optional getNewWindowActionNode() { + return Optional.of(this); + } + + @Override + public Optional getExternalViewerActionNode() { + return Optional.of(this); + } + + @Override + public boolean supportsTreeExtractActions() { + return true; + } + + @Override + public boolean supportsContentTagAction() { + return true; + } + + @Override + public Optional getFileForDirectoryBrowseMode() { + return Optional.of(file); + } + + @Override + public Optional getExtractArchiveWithPasswordActionFile() { + // TODO: See JIRA-8099 + boolean isArchive = FileTypeExtensions.getArchiveExtensions().contains("." + file.getNameExtension().toLowerCase()); + boolean encryptionDetected = false; + try { + encryptionDetected = isArchive && file.getArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED).size() > 0; + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, "Error loading artifacts for file with ID: " + file.getId(), ex); + } + + return encryptionDetected ? 
Optional.of(file) : Optional.empty();
+ }
+ }
+
+ @NbBundle.Messages({
+ "FileSystemFactory.UnsupportedTreeNode.displayName=Unsupported Content",})
+ static class UnsupportedTreeNode extends FileSystemTreeNode {
+
+ Content content;
+
+ UnsupportedTreeNode(Content content, TreeResultsDTO.TreeItemDTO<FileSystemContentSearchParam> itemData) {
+ super(NodeIconUtil.FILE.getPath(),
+ itemData,
+ createChildrenForContent(itemData.getSearchParams().getContentObjectId()),
+ getDefaultLookup(itemData));
+ this.content = content;
+ }
+
+ public Node clone() {
+ return new UnsupportedTreeNode(content, getItemData());
+ }
+ }
+}
diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/ImageNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/ImageNode.java index 19e7bd74db..5b9c900689 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/ImageNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/ImageNode.java @@ -41,9 +41,10 @@ public class ImageNode extends BaseNode { */ public ImageNode(SearchResultsDTO results, ImageRowDTO row) { super(Children.LEAF, ContentNodeUtil.getLookup(row.getContent()), results, row);
+ setName(ContentNodeUtil.getContentName(row.getContent().getId()));
setDisplayName(row.getContent().getName());
setShortDescription(row.getContent().getName());
- setIconBaseWithExtension("org/sleuthkit/autopsy/images/hard-drive-icon.jpg"); //NON-NLS
+ setIconBaseWithExtension(NodeIconUtil.IMAGE.getPath()); //NON-NLS
}
@NbBundle.Messages({
diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/NodeIconUtil.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/NodeIconUtil.java new file mode 100644 index 0000000000..059db7b50c --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/NodeIconUtil.java @@ -0,0 +1,74 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2021 Basis Technology Corp.
+ * Contact: carrier sleuthkit org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.mainui.nodes;
+
+import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.Content;
+import org.sleuthkit.datamodel.Image;
+import org.sleuthkit.datamodel.LocalFilesDataSource;
+import org.sleuthkit.datamodel.Pool;
+import org.sleuthkit.datamodel.TskData;
+import org.sleuthkit.datamodel.Volume;
+
+/**
+ * Consolidates node paths shared between the result view table and the tree.
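+ * Each constant wraps a single icon resource path so both views reference
+ * the same image.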
+ */ +class NodeIconUtil { + + final static NodeIconUtil FOLDER = new NodeIconUtil("org/sleuthkit/autopsy/images/Folder-icon.png"); + final static NodeIconUtil DELETED_FOLDER = new NodeIconUtil("org/sleuthkit/autopsy/images/folder-icon-deleted.png"); + final static NodeIconUtil VIRTUAL_DIRECTORY = new NodeIconUtil("org/sleuthkit/autopsy/images/folder-icon-virtual.png"); + final static NodeIconUtil CARVED_FILE = new NodeIconUtil("org/sleuthkit/autopsy/images/carved-file-x-icon-16.png"); + final static NodeIconUtil DELETED_FILE = new NodeIconUtil("org/sleuthkit/autopsy/images/file-icon-deleted.png"); + final static NodeIconUtil IMAGE = new NodeIconUtil("org/sleuthkit/autopsy/images/hard-drive-icon.jpg"); + final static NodeIconUtil VOLUME = new NodeIconUtil("org/sleuthkit/autopsy/images/vol-icon.png"); + final static NodeIconUtil POOL = new NodeIconUtil("org/sleuthkit/autopsy/images/pool-icon.png"); + final static NodeIconUtil FILE = new NodeIconUtil("org/sleuthkit/autopsy/images/file-icon.png"); + final static NodeIconUtil LOCAL_FILES_DATA_SOURCE = new NodeIconUtil("org/sleuthkit/autopsy/images/fileset-icon-16.png"); + //final static NodeIconUtil = new NodeIconUtil(""); + + private final String iconPath; + + private NodeIconUtil(String path) { + this.iconPath = path; + } + + String getPath() { + return iconPath; + } + + public static String getPathForContent(Content c) { + if (c instanceof Image) { + return IMAGE.getPath(); + } else if (c instanceof LocalFilesDataSource) { + return LOCAL_FILES_DATA_SOURCE.getPath(); + } else if (c instanceof Volume) { + return VOLUME.getPath(); + } else if (c instanceof Pool) { + return POOL.getPath(); + } else if (c instanceof AbstractFile) { + AbstractFile file = (AbstractFile) c; + if (((AbstractFile) c).isDir()) { + if (file.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC)) { + return DELETED_FOLDER.getPath(); + } else { + return FOLDER.getPath(); + } + } else { + if (file.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC)) { + if (file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.CARVED)) { + return CARVED_FILE.getPath(); + } else { + return DELETED_FILE.getPath(); + } + } else { + return FILE.getPath(); + } + } + } + return FILE.getPath(); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/OsAccountNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/OsAccountNode.java index 93e8e852a4..667d062251 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/OsAccountNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/OsAccountNode.java @@ -39,7 +39,7 @@ public class OsAccountNode extends BaseNode{ results, rowData); String name = rowData.getContent().getName(); - setName(name); + setName(ContentNodeUtil.getContentName(rowData.getContent().getId())); setDisplayName(name); setShortDescription(name); setIconBaseWithExtension(ICON_PATH); @@ -56,7 +56,7 @@ public class OsAccountNode extends BaseNode{ } @Override - public boolean supportsExtractActions() { + public boolean supportsTableExtractActions() { return true; } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/PoolNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/PoolNode.java index 94ebfa58e0..198a13a124 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/PoolNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/PoolNode.java @@ -41,8 +41,9 @@ public class PoolNode extends BaseNode { results, row); String name = row.getContent().getType().getName(); + setName(ContentNodeUtil.getContentName(row.getContent().getId())); setDisplayName(name); 
setShortDescription(name); - setIconBaseWithExtension("org/sleuthkit/autopsy/images/pool-icon.png"); + setIconBaseWithExtension(NodeIconUtil.POOL.getPath()); } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/SearchManager.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/SearchManager.java index d84c6cc313..c331eac5c7 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/SearchManager.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/SearchManager.java @@ -18,9 +18,9 @@ */ package org.sleuthkit.autopsy.mainui.nodes; -import java.beans.PropertyChangeEvent; import java.text.MessageFormat; import java.util.concurrent.ExecutionException; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; import org.sleuthkit.autopsy.mainui.datamodel.SearchResultsDTO; /** @@ -133,7 +133,7 @@ public class SearchManager { */ public synchronized SearchResultsDTO updatePageIdx(int pageIdx) throws IllegalArgumentException, ExecutionException { setPageIdx(pageIdx); - return fetchResults(false); + return getResults(); } /** @@ -171,11 +171,11 @@ public class SearchManager { /** * Determines if a refresh is required for the currently selected item. * - * @param evt The ingest module event. + * @param evt The event. * * @return True if an update is required. */ - public synchronized boolean isRefreshRequired(PropertyChangeEvent evt) { + public synchronized boolean isRefreshRequired(DAOEvent evt) { return isRefreshRequired(this.daoFetcher, evt); } @@ -183,11 +183,11 @@ public class SearchManager { * Determines if a refresh is required for the currently selected item. * * @param dataFetcher The data fetcher. - * @param evt The ingest module event. + * @param evt The event. * * @return True if an update is required. */ - private synchronized

boolean isRefreshRequired(DAOFetcher<?> dataFetcher, PropertyChangeEvent evt) { + private synchronized boolean isRefreshRequired(DAOFetcher<?>

dataFetcher, DAOEvent evt) { if (dataFetcher == null) { return false; } @@ -195,17 +195,6 @@ public class SearchManager { return dataFetcher.isRefreshRequired(evt); } - /** - * Forces a refresh of data based on current search parameters. - * - * @return The refreshed data. - * - * @throws ExecutionException - */ - public synchronized SearchResultsDTO getRefreshedData() throws ExecutionException { - return fetchResults(true); - } - /** * Queries the dao cache for results storing the result in the current * search results. @@ -216,25 +205,13 @@ public class SearchManager { * @throws ExecutionException */ public synchronized SearchResultsDTO getResults() throws IllegalArgumentException, ExecutionException { - return fetchResults(false); + return fetchResults(this.daoFetcher); } - /** - * Fetches results using current page fetcher or returns null if no current - * page fetcher. Also stores current results in local variable. - * - * @return The current search results or null if no current page fetcher. - * - * @throws ExecutionException - */ - private synchronized SearchResultsDTO fetchResults(boolean hardRefresh) throws ExecutionException { - return fetchResults(this.daoFetcher, hardRefresh); - } - - private synchronized SearchResultsDTO fetchResults(DAOFetcher dataFetcher, boolean hardRefresh) throws ExecutionException { + private synchronized SearchResultsDTO fetchResults(DAOFetcher dataFetcher) throws ExecutionException { SearchResultsDTO newResults = null; if (dataFetcher != null) { - newResults = dataFetcher.getSearchResults(this.pageSize, this.pageIdx, hardRefresh); + newResults = dataFetcher.getSearchResults(this.pageSize, this.pageIdx); } this.currentSearchResults = newResults; diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/SpecialDirectoryNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/SpecialDirectoryNode.java index 7e05ed6ef5..0ad6a615a8 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/SpecialDirectoryNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/SpecialDirectoryNode.java @@ -43,6 +43,7 @@ abstract class SpecialDirectoryNode extends BaseNode row) { super(Children.LEAF, ContentNodeUtil.getLookup(row.getContent()), results, row); + setName(ContentNodeUtil.getContentName(row.getContent().getId())); setDisplayName(row.getContent().getName()); setShortDescription(row.getContent().getName()); } @@ -58,7 +59,7 @@ abstract class SpecialDirectoryNode extends BaseNode getContentForFileSearchAction() { - return Optional.of(getRowDTO().getContent()); - } - @Override public Optional getDataSourceForActions() { return getRowDTO().getContent().isDataSource() diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java index 395ef12f2e..74baf01e6f 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java @@ -22,83 +22,87 @@ import com.google.common.collect.MapMaker; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.util.ArrayList; -import java.util.EnumSet; +import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.logging.Level; -import java.util.stream.Collectors; import org.openide.nodes.ChildFactory; import org.openide.nodes.Node; import org.openide.util.WeakListeners; -import 
org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.guiutils.RefreshThrottler; -import org.sleuthkit.autopsy.guiutils.RefreshThrottler.Refresher; -import org.sleuthkit.autopsy.ingest.IngestManager; +import org.sleuthkit.autopsy.mainui.datamodel.MainDAO; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOAggregateEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; /** - * Factory for populating tree with results. + * Factory for populating child nodes in a tree based on TreeResultsDTO */ -public abstract class TreeChildFactory extends ChildFactory.Detachable implements Refresher { +public abstract class TreeChildFactory extends ChildFactory.Detachable implements Comparator { private static final Logger logger = Logger.getLogger(TreeChildFactory.class.getName()); - private static final Set INGEST_JOB_EVENTS_OF_INTEREST - = EnumSet.of(IngestManager.IngestJobEvent.COMPLETED, IngestManager.IngestJobEvent.CANCELLED); - - private final RefreshThrottler refreshThrottler = new RefreshThrottler(this); - private final PropertyChangeListener pcl = (PropertyChangeEvent evt) -> { - String eventType = evt.getPropertyName(); - if (eventType.equals(Case.Events.CURRENT_CASE.toString())) { - // case was closed. Remove listeners so that we don't get called with a stale case handle - if (evt.getNewValue() == null) { - removeNotify(); - } - } else if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString()) - || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) { - /** - * This is a stop gap measure until a different way of handling the - * closing of cases is worked out. Currently, remote events may be - * received for a case that is already closed. - */ - try { - Case.getCurrentCaseThrows(); - refresh(false); - } catch (NoCurrentCaseException notUsed) { - /** - * Case is closed, do nothing. - */ + if (evt.getNewValue() instanceof DAOAggregateEvent) { + DAOAggregateEvent aggEvt = (DAOAggregateEvent) evt.getNewValue(); + for (DAOEvent daoEvt : aggEvt.getEvents()) { + if (daoEvt instanceof TreeEvent) { + TreeEvent treeEvt = (TreeEvent) daoEvt; + TreeItemDTO item = getOrCreateRelevantChild(treeEvt); + if (item != null) { + if (treeEvt.isRefreshRequired()) { + update(); + break; + } else { + updateNodeData(item); + } + } + } } } }; - private final PropertyChangeListener weakPcl = WeakListeners.propertyChange(pcl, null); + private final PropertyChangeListener weakPcl = WeakListeners.propertyChange(pcl, MainDAO.getInstance().getTreeEventsManager()); + // maps the Node keys to the child TreeNode. Used to update existing Node with new counts private final Map> typeNodeMap = new MapMaker().weakValues().makeMap(); + private final Object resultsUpdateLock = new Object(); + + // Results of the last full load from the DAO. May not be complete because + // events will come in with more updated data. private TreeResultsDTO curResults = null; + + // All current child items (sorted). May have more items than curResults does because + // this is updated based on events and new data. 
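+ // Access to curResults, curItemsList and idMapping is synchronized on resultsUpdateLock.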
+ private List> curItemsList = new ArrayList<>(); + + // maps the Node key (ID) to its DTO private Map> idMapping = new HashMap<>(); @Override protected boolean createKeys(List toPopulate) { - if (curResults == null) { - try { - updateData(); - } catch (IllegalArgumentException | ExecutionException ex) { - logger.log(Level.WARNING, "An error occurred while fetching keys", ex); - return false; + List> itemsList; + synchronized (resultsUpdateLock) { + // Load data from DAO if we haven't already + if (curResults == null) { + try { + updateData(); + } catch (IllegalArgumentException | ExecutionException ex) { + logger.log(Level.WARNING, "An error occurred while fetching keys", ex); + return false; + } } + // make copy to avoid concurrent modification + itemsList = new ArrayList<>(curItemsList); } // update existing cached nodes List curResultIds = new ArrayList<>(); - for (TreeItemDTO dto : curResults.getItems()) { + for (TreeItemDTO dto : itemsList) { TreeNode currentlyCached = typeNodeMap.get(dto.getId()); if (currentlyCached != null) { currentlyCached.update(dto); @@ -122,25 +126,55 @@ public abstract class TreeChildFactory extends ChildFactory.Detachable item) { + TreeNode cachedTreeNode = this.typeNodeMap.get(item.getId()); + if (cachedTreeNode == null) { + synchronized (resultsUpdateLock) { + // add to id mapping + this.idMapping.put(item.getId(), item); + + // insert in sorted position + int insertIndex = 0; + for (; insertIndex < this.curItemsList.size(); insertIndex++) { + if (this.compare(item.getSearchParams(), this.curItemsList.get(insertIndex).getSearchParams()) < 0) { + break; + } + } + this.curItemsList.add(insertIndex, item); + } + this.refresh(false); + } else { + cachedTreeNode.update(item); + } + } + + /** + * Updates local data structures by fetching new data from the DAO's. * * @throws IllegalArgumentException * @throws ExecutionException */ protected void updateData() throws IllegalArgumentException, ExecutionException { - this.curResults = getChildResults(); - this.idMapping = curResults.getItems().stream() - .collect(Collectors.toMap(item -> item.getId(), item -> item, (item1, item2) -> item1)); + synchronized (resultsUpdateLock) { + this.curResults = getChildResults(); + Map> idMapping = new HashMap<>(); + List> curItemsList = new ArrayList<>(); + for (TreeItemDTO item : this.curResults.getItems()) { + idMapping.put(item.getId(), item); + curItemsList.add(item); + } - } - - @Override - public void refresh() { - update(); + this.idMapping = idMapping; + this.curItemsList = curItemsList; + } } /** - * Fetches child view from the database and updates the tree. + * Updates the tree using new data from the DAO. */ public void update() { try { @@ -156,40 +190,41 @@ public abstract class TreeChildFactory extends ChildFactory.Detachable extends ChildFactory.Detachable TreeItemDTO createTreeItemDTO(TreeItemDTO original, T updatedData) { + return new TreeItemDTO<>( + original.getTypeId(), + updatedData, + original.getId(), + original.getDisplayName(), + original.getDisplayCount()); + } + + /** + * Returns the underlying tree item dto in the tree event if the search + * params of the tree item dto are of the expected type. Otherwise, returns + * null. + * + * @param treeEvt The tree event. + * @param expectedSearchParamsType The expected type of the search params of + * the tree item dto in the tree event. + * + * @return The typed tree item dto in the tree event or null if no match + * found. 
+ */ + protected TreeItemDTO getTypedTreeItem(TreeEvent treeEvt, Class expectedSearchParamsType) { + if (treeEvt != null && treeEvt.getItemRecord() != null && treeEvt.getItemRecord().getSearchParams() != null + && expectedSearchParamsType.isAssignableFrom(treeEvt.getItemRecord().getSearchParams().getClass())) { + + @SuppressWarnings("unchecked") + TreeItemDTO originalTreeItem = (TreeItemDTO) treeEvt.getItemRecord(); + return originalTreeItem; + } + return null; + } + /** * Creates a TreeNode given the tree item data. * @@ -217,4 +293,15 @@ public abstract class TreeChildFactory extends ChildFactory.Detachable getChildResults() throws IllegalArgumentException, ExecutionException; + + /** + * Creates a child tree item dto that can be used to find the affected child + * node that requires updates. + * + * @param treeEvt The tree event. + * + * @return The tree item dto that can be used to find the child node + * affected by the tree event. + */ + protected abstract TreeItemDTO getOrCreateRelevantChild(TreeEvent treeEvt); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeNode.java index 847069d12e..7fd7cf4593 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeNode.java @@ -20,6 +20,7 @@ package org.sleuthkit.autopsy.mainui.nodes; import org.sleuthkit.autopsy.corecomponents.SelectionResponder; import java.text.MessageFormat; +import java.util.Objects; import java.util.logging.Level; import org.openide.nodes.AbstractNode; import org.openide.nodes.Children; @@ -44,7 +45,7 @@ public abstract class TreeNode extends AbstractNode implements SelectionRespo * @return The lookup to use in the node. */ protected static Lookup getDefaultLookup(TreeItemDTO itemData) { - return Lookups.fixed(itemData, itemData.getTypeData()); + return Lookups.fixed(itemData, itemData.getSearchParams()); } private TreeItemDTO itemData; @@ -88,7 +89,7 @@ public abstract class TreeNode extends AbstractNode implements SelectionRespo protected TreeItemDTO getItemData() { return itemData; } - + /** * Sets the display name of the node to include the display name and count * of the item. @@ -100,10 +101,10 @@ public abstract class TreeNode extends AbstractNode implements SelectionRespo // update display name only if there is a change. if (prevData == null || !prevData.getDisplayName().equals(curData.getDisplayName()) - || prevData.getCount() != curData.getCount()) { - String displayName = curData.getCount() == null + || !Objects.equals(prevData.getDisplayCount(), curData.getDisplayCount())) { + String displayName = curData.getDisplayCount() == null ? 
curData.getDisplayName() - : MessageFormat.format("{0} ({1})", curData.getDisplayName(), curData.getCount()); + : curData.getDisplayName() + curData.getDisplayCount().getDisplaySuffix(); this.setDisplayName(displayName); } @@ -119,7 +120,7 @@ public abstract class TreeNode extends AbstractNode implements SelectionRespo public void update(TreeItemDTO updatedData) { if (updatedData == null) { logger.log(Level.WARNING, "Expected non-null updatedData"); - } else if (this.itemData != null && this.itemData.getId() != updatedData.getId()) { + } else if (this.itemData != null && !Objects.equals(this.itemData.getId(), updatedData.getId())) { logger.log(Level.WARNING, MessageFormat.format( "Expected update data to have same id but received [id: {0}] replacing [id: {1}]", updatedData.getId(), diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java index a608827b24..c4ddbf366e 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java @@ -20,6 +20,7 @@ package org.sleuthkit.autopsy.mainui.nodes; import java.beans.PropertyChangeEvent; import java.util.Collection; +import java.util.Comparator; import java.util.concurrent.ExecutionException; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -33,9 +34,9 @@ import org.sleuthkit.autopsy.mainui.datamodel.FileExtSearchFilter; import org.sleuthkit.autopsy.mainui.datamodel.FileTypeExtensionsSearchParams; import org.sleuthkit.autopsy.mainui.datamodel.FileTypeMimeSearchParams; import org.sleuthkit.autopsy.mainui.datamodel.FileTypeSizeSearchParams; -import org.sleuthkit.autopsy.mainui.datamodel.FileTypeSizeSearchParams.FileSizeFilter; import org.sleuthkit.autopsy.mainui.datamodel.MainDAO; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; import org.sleuthkit.datamodel.AbstractFile; /** @@ -44,34 +45,7 @@ import org.sleuthkit.datamodel.AbstractFile; */ public class ViewsTypeFactory { - /** - * Returns an AbstractFile if the event contains a ModuleContentEvent which - * contains an abstract file and that file belongs to the data source if a - * data source id is specified. Otherwise, returns null. - * - * @param evt The event - * @param dataSourceId The data source object id that will be the parent of - * the file or null. - * - * @return The file meeting criteria or null. - */ - private static AbstractFile getFileInDataSourceFromEvt(PropertyChangeEvent evt, Long dataSourceId) { - if (!(evt.getOldValue() instanceof ModuleContentEvent)) { - return null; - } - - ModuleContentEvent contentEvt = (ModuleContentEvent) evt.getOldValue(); - if (!(contentEvt.getSource() instanceof AbstractFile)) { - return null; - } - - AbstractFile file = (AbstractFile) contentEvt.getSource(); - if (dataSourceId != null && file.getDataSourceObjectId() != dataSourceId) { - return null; - } - - return file; - } + private static final Comparator STRING_COMPARATOR = Comparator.nullsFirst(Comparator.naturalOrder()); /** * The factory for creating file size tree nodes. 
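For reference, the ordering behavior that STRING_COMPARATOR provides can be seen in a minimal, self-contained sketch (the class name below is illustrative and not part of this change):

import java.util.Comparator;

public class NullsFirstSketch {

    public static void main(String[] args) {
        // Same construction as STRING_COMPARATOR above: null keys sort ahead of
        // non-null keys instead of throwing a NullPointerException.
        Comparator<String> cmp = Comparator.nullsFirst(Comparator.naturalOrder());
        System.out.println(cmp.compare(null, "application/pdf"));   // negative: null first
        System.out.println(cmp.compare("image/png", "text/plain")); // natural string order
    }
}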
@@ -100,20 +74,14 @@ public class ViewsTypeFactory { } @Override - public boolean isRefreshRequired(PropertyChangeEvent evt) { - AbstractFile evtFile = getFileInDataSourceFromEvt(evt, this.dataSourceId); - if (evtFile == null) { - return false; - } + protected TreeResultsDTO.TreeItemDTO getOrCreateRelevantChild(TreeEvent daoEvt) { + // GVDTODO + return null; + } - long size = evtFile.getSize(); - for (FileSizeFilter filter : FileSizeFilter.values()) { - if (size >= filter.getMinBound() || size < filter.getMaxBound()) { - return true; - } - } - - return false; + @Override + public int compare(FileTypeSizeSearchParams o1, FileTypeSizeSearchParams o2) { + return Integer.compare(o1.getSizeFilter().getId(), o2.getSizeFilter().getId()); } /** @@ -127,12 +95,12 @@ public class ViewsTypeFactory { * @param itemData The data for the node. */ FileSizeTypeNode(TreeResultsDTO.TreeItemDTO itemData) { - super("FILE_SIZE_" + itemData.getTypeData().getSizeFilter().getName(), "org/sleuthkit/autopsy/images/file-size-16.png", itemData); + super("FILE_SIZE_" + itemData.getSearchParams().getSizeFilter().getName(), "org/sleuthkit/autopsy/images/file-size-16.png", itemData); } @Override public void respondSelection(DataResultTopComponent dataResultPanel) { - dataResultPanel.displayFileSizes(this.getItemData().getTypeData()); + dataResultPanel.displayFileSizes(this.getItemData().getSearchParams()); } } @@ -165,8 +133,14 @@ public class ViewsTypeFactory { } @Override - public boolean isRefreshRequired(PropertyChangeEvent evt) { - return getFileInDataSourceFromEvt(evt, this.dataSourceId) != null; + protected TreeResultsDTO.TreeItemDTO getOrCreateRelevantChild(TreeEvent daoEvt) { + // GVDTODO + return null; + } + + @Override + public int compare(FileTypeMimeSearchParams o1, FileTypeMimeSearchParams o2) { + return STRING_COMPARATOR.compare(o1.getMimeType(), o2.getMimeType()); } static class FileMimePrefixNode extends TreeNode { @@ -178,10 +152,10 @@ public class ViewsTypeFactory { */ public FileMimePrefixNode(TreeResultsDTO.TreeItemDTO itemData) { super( - "FILE_MIME_" + itemData.getTypeData().getMimeType(), + "FILE_MIME_" + itemData.getSearchParams().getMimeType(), "org/sleuthkit/autopsy/images/file_types.png", itemData, - Children.create(new FileMimeSuffixFactory(itemData.getTypeData().getDataSourceId(), itemData.getTypeData().getMimeType()), true), + Children.create(new FileMimeSuffixFactory(itemData.getSearchParams().getDataSourceId(), itemData.getSearchParams().getMimeType()), true), getDefaultLookup(itemData)); } } @@ -219,13 +193,14 @@ public class ViewsTypeFactory { } @Override - public boolean isRefreshRequired(PropertyChangeEvent evt) { - AbstractFile file = getFileInDataSourceFromEvt(evt, dataSourceId); - if (file == null || file.getMIMEType() == null) { - return false; - } + protected TreeResultsDTO.TreeItemDTO getOrCreateRelevantChild(TreeEvent daoEvt) { + // GVDTODO + return null; + } - return file.getMIMEType().toLowerCase().startsWith(this.mimeTypePrefix.toLowerCase()); + @Override + public int compare(FileTypeMimeSearchParams o1, FileTypeMimeSearchParams o2) { + return STRING_COMPARATOR.compare(o1.getMimeType(), o2.getMimeType()); } /** @@ -240,14 +215,14 @@ public class ViewsTypeFactory { * @param itemData The data for the node. 
*/ public FileMimeSuffixNode(TreeResultsDTO.TreeItemDTO itemData) { - super("FILE_MIME_" + itemData.getTypeData().getMimeType(), + super("FILE_MIME_" + itemData.getSearchParams().getMimeType(), "org/sleuthkit/autopsy/images/file-filter-icon.png", itemData); } @Override public void respondSelection(DataResultTopComponent dataResultPanel) { - dataResultPanel.displayFileMimes(this.getItemData().getTypeData()); + dataResultPanel.displayFileMimes(this.getItemData().getSearchParams()); } } @@ -286,9 +261,9 @@ public class ViewsTypeFactory { @Override protected TreeNode createNewNode(TreeResultsDTO.TreeItemDTO rowData) { Collection childFilters; - if (rowData.getTypeData().getFilter() == FileExtRootFilter.TSK_DOCUMENT_FILTER) { + if (rowData.getSearchParams().getFilter() == FileExtRootFilter.TSK_DOCUMENT_FILTER) { childFilters = Stream.of(FileExtDocumentFilter.values()).collect(Collectors.toList()); - } else if (rowData.getTypeData().getFilter() == FileExtRootFilter.TSK_EXECUTABLE_FILTER) { + } else if (rowData.getSearchParams().getFilter() == FileExtRootFilter.TSK_EXECUTABLE_FILTER) { childFilters = Stream.of(FileExtExecutableFilter.values()).collect(Collectors.toList()); } else { childFilters = null; @@ -303,11 +278,14 @@ public class ViewsTypeFactory { } @Override - public boolean isRefreshRequired(PropertyChangeEvent evt) { - AbstractFile file = getFileInDataSourceFromEvt(evt, this.dataSourceId); - return file != null && this.childFilters.stream() - .anyMatch((filter) -> MainDAO.getInstance().getViewsDAO().isFilesByExtInvalidating( - new FileTypeExtensionsSearchParams(filter, this.dataSourceId), file)); + protected TreeResultsDTO.TreeItemDTO getOrCreateRelevantChild(TreeEvent daoEvt) { + //GVDTODO + return null; + } + + @Override + public int compare(FileTypeExtensionsSearchParams o1, FileTypeExtensionsSearchParams o2) { + return STRING_COMPARATOR.compare(o1.getFilter().getDisplayName(), o2.getFilter().getDisplayName()); } /** @@ -326,10 +304,10 @@ public class ViewsTypeFactory { * children of this node. */ public FileExtNode(TreeResultsDTO.TreeItemDTO itemData, Collection childFilters) { - super("FILE_EXT_" + itemData.getTypeData().getFilter().getName(), + super("FILE_EXT_" + itemData.getSearchParams().getFilter().getName(), childFilters == null ? "org/sleuthkit/autopsy/images/file-filter-icon.png" : "org/sleuthkit/autopsy/images/file_types.png", itemData, - childFilters == null ? Children.LEAF : Children.create(new FileExtFactory(itemData.getTypeData().getDataSourceId(), childFilters), true), + childFilters == null ? 
Children.LEAF : Children.create(new FileExtFactory(itemData.getSearchParams().getDataSourceId(), childFilters), true), getDefaultLookup(itemData)); this.childFilters = childFilters; @@ -338,7 +316,7 @@ @Override public void respondSelection(DataResultTopComponent dataResultPanel) { if (childFilters == null) { - dataResultPanel.displayFileExtensions(this.getItemData().getTypeData()); + dataResultPanel.displayFileExtensions(this.getItemData().getSearchParams()); } else { super.respondSelection(dataResultPanel); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/VolumeNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/VolumeNode.java index ef77cc797a..99d600bf6c 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/VolumeNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/VolumeNode.java @@ -42,13 +42,14 @@ public class VolumeNode extends BaseNode { */ public VolumeNode(SearchResultsDTO results, VolumeRowDTO row) { super(Children.LEAF, ContentNodeUtil.getLookup(row.getContent()), results, row); - setIconBaseWithExtension("org/sleuthkit/autopsy/images/vol-icon.png"); //NON-NLS + setIconBaseWithExtension(NodeIconUtil.VOLUME.getPath()); //NON-NLS // use first cell value for display name String displayName = row.getCellValues().size() > 0 ? row.getCellValues().get(0).toString() : ""; + setName(ContentNodeUtil.getContentName(row.getContent().getId())); setDisplayName(displayName); setShortDescription(displayName); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/actions/ActionContext.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/actions/ActionContext.java index 363dc2303f..f4a555ce9c 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/actions/ActionContext.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/actions/ActionContext.java @@ -158,11 +158,22 @@ public interface ActionContext { } /** - * Returns true if the context supported the extract action. + * Returns true if the context supports the extract actions + * for nodes in the table view. * * @return True if the action is supported. */ - default boolean supportsExtractActions() { + default boolean supportsTableExtractActions() { + return false; + } + + /** + * Returns true if the context supports the extract actions + * for nodes in the tree view. + * + * @return True if the action is supported. 
+ */ + default boolean supportsTreeExtractActions() { return false; } @@ -207,10 +218,6 @@ public interface ActionContext { return Optional.empty(); } - default Optional getContentForFileSearchAction() { - return Optional.empty(); - } - default Optional getDataSourceForActions() { return Optional.empty(); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/actions/ActionsFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/actions/ActionsFactory.java index 51cc94bd3d..f8d75ba4e7 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/actions/ActionsFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/actions/ActionsFactory.java @@ -27,6 +27,7 @@ import java.util.Iterator; import java.util.List; import java.util.Optional; import java.util.function.Consumer; +import java.util.logging.Level; import java.util.stream.Stream; import javax.swing.Action; import org.openide.actions.PropertiesAction; @@ -47,6 +48,7 @@ import org.sleuthkit.autopsy.actions.ViewArtifactAction; import org.sleuthkit.autopsy.actions.ViewOsAccountAction; import org.sleuthkit.autopsy.casemodule.DeleteDataSourceAction; import org.sleuthkit.autopsy.coreutils.ContextMenuExtensionPoint; +import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.datamodel.BlackboardArtifactItem; import org.sleuthkit.autopsy.datamodel.DataModelActionsFactory; import org.sleuthkit.autopsy.datasourcesummary.ui.ViewSummaryInformationAction; @@ -77,6 +79,8 @@ import org.sleuthkit.datamodel.TskCoreException; * their supported actions. */ public final class ActionsFactory { + + private static final Logger logger = Logger.getLogger(ActionsFactory.class.getName()); // private constructor for utility class. private ActionsFactory() {} @@ -137,8 +141,10 @@ public final class ActionsFactory { } } - if (actionContext.supportsExtractActions()) { - actionGroups.add(getExtractActions()); + if (actionContext.supportsTableExtractActions()) { + actionGroups.add(getTableExtractActions()); + } else if (actionContext.supportsTreeExtractActions()) { + actionGroups.add(getTreeExtractActions()); } group = new ActionGroup(); @@ -174,12 +180,12 @@ public final class ActionsFactory { } /** - * Returns the Extract actions. These actions are not specific to the + * Returns the Extract actions for a table node. These actions are not specific to the * ActionContext. * * @return The Extract ActionGroup. */ - static ActionGroup getExtractActions() { + static ActionGroup getTableExtractActions() { ActionGroup actionsGroup = new ActionGroup(); Lookup lookup = Utilities.actionsGlobalContext(); @@ -191,6 +197,19 @@ public final class ActionsFactory { return actionsGroup; } + + /** + * Returns the Extract actions for a tree node. These actions are not specific to the + * ActionContext. + * + * @return The Extract ActionGroup. + */ + static ActionGroup getTreeExtractActions() { + ActionGroup actionsGroup = new ActionGroup(); + actionsGroup.add(ExtractAction.getInstance()); + + return actionsGroup; + } /** * Returns the ActionGroup for the source content viewer actions . 
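The ActionContext and ActionsFactory hunks above split the old supportsExtractActions capability into table and tree variants, with the table flavor taking precedence in getActions. A compilable sketch of that dispatch pattern; MiniActionContext and the action strings are hypothetical stand-ins for the real ActionContext and ActionGroup types:

    import java.util.ArrayList;
    import java.util.List;

    // Stand-in for the capability flags on ActionContext; illustrative only.
    interface MiniActionContext {

        default boolean supportsTableExtractActions() {
            return false;
        }

        default boolean supportsTreeExtractActions() {
            return false;
        }
    }

    public class ExtractActionDispatchDemo {

        // Mirrors the if / else-if added to ActionsFactory.getActions():
        // table extract actions win, tree extract actions are the fallback.
        static List<String> extractActionsFor(MiniActionContext context) {
            List<String> actions = new ArrayList<>();
            if (context.supportsTableExtractActions()) {
                actions.add("lookup-driven table extract actions");
            } else if (context.supportsTreeExtractActions()) {
                actions.add("single ExtractAction instance");
            }
            return actions;
        }

        public static void main(String[] args) {
            MiniActionContext treeNode = new MiniActionContext() {
                @Override
                public boolean supportsTreeExtractActions() {
                    return true;
                }
            };
            System.out.println(extractActionsFor(treeNode)); // [single ExtractAction instance]
        }
    }

Defaulting both flags to false keeps the interface source-compatible: existing implementations opt in per view instead of inheriting the old one-size-fits-all behavior.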
@@ -425,15 +444,10 @@ public final class ActionsFactory { "ActionFactory_openFileSearchByAttr_text=Open File Search by Attributes" }) private static Optional getRunIngestAction(ActionContext context) { - ActionGroup group = new ActionGroup(); - Optional optional = context.getContentForFileSearchAction(); - + ActionGroup group = new ActionGroup(); + Optional optional = context.getDataSourceForActions(); if(optional.isPresent()) { group.add(new FileSearchAction(Bundle.ActionFactory_openFileSearchByAttr_text(), optional.get().getId())); - } - - optional = context.getDataSourceForActions(); - if(optional.isPresent()) { group.add(new ViewSummaryInformationAction(optional.get().getId())); group.add(new RunIngestModulesAction(Collections.singletonList(optional.get()))); group.add(new DeleteDataSourceAction(optional.get().getId())); @@ -442,7 +456,11 @@ public final class ActionsFactory { optional = context.getContentForRunIngestionModuleAction(); if(optional.isPresent()) { - group.add(new RunIngestModulesAction(Collections.singletonList(optional.get()))); + if (optional.get() instanceof AbstractFile) { + group.add(new RunIngestModulesAction((AbstractFile)optional.get())); + } else { + logger.log(Level.WARNING, "Can not create RunIngestModulesAction on non-AbstractFile content with ID " + optional.get().getId()); + } } } diff --git a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java index 3e6e4a9b07..1d53771dd8 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java +++ b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java @@ -69,7 +69,6 @@ import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; -import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ITEM; import org.sleuthkit.datamodel.BlackboardAttribute; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION; @@ -327,8 +326,7 @@ class SevenZipExtractor { TSK_COMMENT, MODULE_NAME, details)); - if (!blackboard.artifactExists(archiveFile, TSK_INTERESTING_ITEM, attributes)) { - + if (!blackboard.artifactExists(archiveFile, BlackboardArtifact.Type.TSK_INTERESTING_ITEM, attributes)) { BlackboardArtifact artifact = rootArchive.getArchiveFile().newAnalysisResult( BlackboardArtifact.Type.TSK_INTERESTING_ITEM, Score.SCORE_LIKELY_NOTABLE, null, setName, null, diff --git a/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java index 78b8070852..a2c37ce005 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java @@ -36,7 +36,6 @@ import org.sleuthkit.autopsy.modules.filetypeid.CustomFileTypesManager.CustomFil import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; -import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ITEM; import org.sleuthkit.datamodel.BlackboardAttribute; import static 
org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME; @@ -194,7 +193,7 @@ public class FileTypeIdIngestModule implements FileIngestModule { Blackboard tskBlackboard = currentCase.getSleuthkitCase().getBlackboard(); // Create artifact if it doesn't already exist. - if (!tskBlackboard.artifactExists(file, TSK_INTERESTING_ITEM, attributes)) { + if (!tskBlackboard.artifactExists(file, BlackboardArtifact.Type.TSK_INTERESTING_ITEM, attributes)) { BlackboardArtifact artifact = file.newAnalysisResult( BlackboardArtifact.Type.TSK_INTERESTING_ITEM, Score.SCORE_LIKELY_NOTABLE, null, fileType.getInterestingFilesSetName(), null, diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java index c89e7893c6..d8091efd92 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java @@ -439,7 +439,7 @@ public class HashDbIngestModule implements FileIngestModule { attributesList.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_SET_NAME, HashLookupModuleFactory.getModuleName(), db.getDisplayName())); try { Blackboard tskBlackboard = skCase.getBlackboard(); - if (tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT, attributesList) == false) { + if (tskBlackboard.artifactExists(file, BlackboardArtifact.Type.TSK_HASHSET_HIT, attributesList) == false) { postHashSetHitToBlackboard(file, file.getMd5Hash(), db, comment); } } catch (TskCoreException ex) { diff --git a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesIdentifierIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesIdentifierIngestModule.java index 284b595dc0..8ee1cf1589 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesIdentifierIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesIdentifierIngestModule.java @@ -39,7 +39,6 @@ import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; -import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ITEM; import org.sleuthkit.datamodel.BlackboardAttribute; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME; @@ -142,7 +141,7 @@ final class FilesIdentifierIngestModule implements FileIngestModule { ); // Create artifact if it doesn't already exist. 
- if (!blackboard.artifactExists(file, TSK_INTERESTING_ITEM, attributes)) { + if (!blackboard.artifactExists(file, BlackboardArtifact.Type.TSK_INTERESTING_ITEM, attributes)) { BlackboardArtifact artifact = file.newAnalysisResult( BlackboardArtifact.Type.TSK_INTERESTING_ITEM, Score.SCORE_LIKELY_NOTABLE, null, filesSet.getName(), null, diff --git a/Core/src/org/sleuthkit/autopsy/modules/pictureanalyzer/impls/EXIFProcessor.java b/Core/src/org/sleuthkit/autopsy/modules/pictureanalyzer/impls/EXIFProcessor.java index 2806931f69..06a8717d86 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/pictureanalyzer/impls/EXIFProcessor.java +++ b/Core/src/org/sleuthkit/autopsy/modules/pictureanalyzer/impls/EXIFProcessor.java @@ -48,7 +48,6 @@ import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.modules.pictureanalyzer.PictureAnalyzerIngestModuleFactory; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; -import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF; import org.sleuthkit.datamodel.BlackboardAttribute; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; import org.sleuthkit.datamodel.Content; @@ -68,7 +67,6 @@ import org.sleuthkit.datamodel.Score; public class EXIFProcessor implements PictureProcessor { private static final Logger logger = Logger.getLogger(EXIFProcessor.class.getName()); - private static final BlackboardArtifact.Type EXIF_METADATA = new BlackboardArtifact.Type(TSK_METADATA_EXIF); @Override @NbBundle.Messages({ @@ -151,7 +149,7 @@ public class EXIFProcessor implements PictureProcessor { final Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard(); - if (!attributes.isEmpty() && !blackboard.artifactExists(file, TSK_METADATA_EXIF, attributes)) { + if (!attributes.isEmpty() && !blackboard.artifactExists(file, BlackboardArtifact.Type.TSK_METADATA_EXIF, attributes)) { List artifacts = new ArrayList<>(); final BlackboardArtifact exifArtifact = (file.newAnalysisResult( BlackboardArtifact.Type.TSK_METADATA_EXIF, diff --git a/Core/test/qa-functional/src/org/sleuthkit/autopsy/mainui/datamodel/TableSearchTest.java b/Core/test/qa-functional/src/org/sleuthkit/autopsy/mainui/datamodel/TableSearchTest.java index 1af3f45d14..a73332c526 100644 --- a/Core/test/qa-functional/src/org/sleuthkit/autopsy/mainui/datamodel/TableSearchTest.java +++ b/Core/test/qa-functional/src/org/sleuthkit/autopsy/mainui/datamodel/TableSearchTest.java @@ -539,28 +539,28 @@ public class TableSearchTest extends NbTestCase { DataArtifactSearchParam param = new DataArtifactSearchParam(BlackboardArtifact.Type.TSK_CONTACT, null); DataArtifactDAO dataArtifactDAO = MainDAO.getInstance().getDataArtifactsDAO(); - DataArtifactTableSearchResultsDTO results = dataArtifactDAO.getDataArtifactsForTable(param, 0, null, false); + DataArtifactTableSearchResultsDTO results = dataArtifactDAO.getDataArtifactsForTable(param, 0, null); assertEquals(BlackboardArtifact.Type.TSK_CONTACT, results.getArtifactType()); assertEquals(2, results.getTotalResultsCount()); assertEquals(2, results.getItems().size()); // Get contacts from data source 2 param = new DataArtifactSearchParam(BlackboardArtifact.Type.TSK_CONTACT, dataSource2.getId()); - results = dataArtifactDAO.getDataArtifactsForTable(param, 0, null, false); + results = dataArtifactDAO.getDataArtifactsForTable(param, 0, null); assertEquals(BlackboardArtifact.Type.TSK_CONTACT, results.getArtifactType()); assertEquals(1, 
results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); // Get bookmarks from data source 2 param = new DataArtifactSearchParam(BlackboardArtifact.Type.TSK_WEB_BOOKMARK, dataSource2.getId()); - results = dataArtifactDAO.getDataArtifactsForTable(param, 0, null, false); + results = dataArtifactDAO.getDataArtifactsForTable(param, 0, null); assertEquals(BlackboardArtifact.Type.TSK_WEB_BOOKMARK, results.getArtifactType()); assertEquals(0, results.getTotalResultsCount()); assertEquals(0, results.getItems().size()); // Get all custom artifacts param = new DataArtifactSearchParam(customDataArtifactType, null); - results = dataArtifactDAO.getDataArtifactsForTable(param, 0, null, false); + results = dataArtifactDAO.getDataArtifactsForTable(param, 0, null); assertEquals(customDataArtifactType, results.getArtifactType()); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); @@ -597,7 +597,7 @@ public class TableSearchTest extends NbTestCase { // Get the first page param = new DataArtifactSearchParam(BlackboardArtifact.Type.TSK_WEB_BOOKMARK, null); - results = dataArtifactDAO.getDataArtifactsForTable(param, 0, pageSize, false); + results = dataArtifactDAO.getDataArtifactsForTable(param, 0, pageSize); assertEquals(ARTIFACT_COUNT_WEB_BOOKMARK, results.getTotalResultsCount()); assertEquals(pageSize.longValue(), results.getItems().size()); @@ -613,7 +613,7 @@ public class TableSearchTest extends NbTestCase { // Get the second page param = new DataArtifactSearchParam(BlackboardArtifact.Type.TSK_WEB_BOOKMARK, null); - results = dataArtifactDAO.getDataArtifactsForTable(param, pageSize, pageSize, false); + results = dataArtifactDAO.getDataArtifactsForTable(param, pageSize, pageSize); assertEquals(ARTIFACT_COUNT_WEB_BOOKMARK, results.getTotalResultsCount()); assertEquals(ARTIFACT_COUNT_WEB_BOOKMARK - pageSize, results.getItems().size()); @@ -641,19 +641,19 @@ public class TableSearchTest extends NbTestCase { // Get emails from all data sources CommAccountsSearchParams param = new CommAccountsSearchParams(Account.Type.EMAIL, null); - SearchResultsDTO results = commAccountsDAO.getCommAcounts(param, 0, null, false); + SearchResultsDTO results = commAccountsDAO.getCommAcounts(param, 0, null); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); // Get device accounts from data source 1 param = new CommAccountsSearchParams(Account.Type.DEVICE, dataSource1.getId()); - results = commAccountsDAO.getCommAcounts(param, 0, null, false); + results = commAccountsDAO.getCommAcounts(param, 0, null); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); // Get email accounts from data source 2 param = new CommAccountsSearchParams(Account.Type.EMAIL, dataSource2.getId()); - results = commAccountsDAO.getCommAcounts(param, 0, null, false); + results = commAccountsDAO.getCommAcounts(param, 0, null); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); @@ -673,13 +673,13 @@ public class TableSearchTest extends NbTestCase { // Get phone accounts from all data sources param = new CommAccountsSearchParams(Account.Type.PHONE, null); - results = commAccountsDAO.getCommAcounts(param, 0, null, false); + results = commAccountsDAO.getCommAcounts(param, 0, null); assertEquals(2, results.getTotalResultsCount()); assertEquals(2, results.getItems().size()); // Get phone accounts from data source 2 param = new CommAccountsSearchParams(Account.Type.PHONE, 
dataSource2.getId()); - results = commAccountsDAO.getCommAcounts(param, 0, null, false); + results = commAccountsDAO.getCommAcounts(param, 0, null); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); @@ -707,37 +707,37 @@ public class TableSearchTest extends NbTestCase { // Get plain text files from data source 1 FileTypeMimeSearchParams param = new FileTypeMimeSearchParams("text/plain", dataSource1.getId()); - SearchResultsDTO results = viewsDAO.getFilesByMime(param, 0, null, false); + SearchResultsDTO results = viewsDAO.getFilesByMime(param, 0, null); assertEquals(2, results.getTotalResultsCount()); assertEquals(2, results.getItems().size()); // Get jpeg files from data source 1 param = new FileTypeMimeSearchParams("image/jpeg", dataSource1.getId()); - results = viewsDAO.getFilesByMime(param, 0, null, false); + results = viewsDAO.getFilesByMime(param, 0, null); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); // Get jpeg files from data source 2 param = new FileTypeMimeSearchParams("image/jpeg", dataSource2.getId()); - results = viewsDAO.getFilesByMime(param, 0, null, false); + results = viewsDAO.getFilesByMime(param, 0, null); assertEquals(0, results.getTotalResultsCount()); assertEquals(0, results.getItems().size()); // Search for mime type that should produce no results param = new FileTypeMimeSearchParams("blah/blah", null); - results = viewsDAO.getFilesByMime(param, 0, null, false); + results = viewsDAO.getFilesByMime(param, 0, null); assertEquals(0, results.getTotalResultsCount()); assertEquals(0, results.getItems().size()); // Get plain text files from all data sources param = new FileTypeMimeSearchParams("text/plain", null); - results = viewsDAO.getFilesByMime(param, 0, null, false); + results = viewsDAO.getFilesByMime(param, 0, null); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); // Get the custom file by MIME type param = new FileTypeMimeSearchParams(CUSTOM_MIME_TYPE, null); - results = viewsDAO.getFilesByMime(param, 0, null, false); + results = viewsDAO.getFilesByMime(param, 0, null); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); @@ -761,32 +761,32 @@ public class TableSearchTest extends NbTestCase { ViewsDAO viewsDAO = MainDAO.getInstance().getViewsDAO(); // Get "50 - 200MB" files from data source 1 - FileTypeSizeSearchParams param = new FileTypeSizeSearchParams(FileTypeSizeSearchParams.FileSizeFilter.SIZE_50_200, dataSource1.getId()); - SearchResultsDTO results = viewsDAO.getFilesBySize(param, 0, null, false); + FileTypeSizeSearchParams param = new FileTypeSizeSearchParams(FileSizeFilter.SIZE_50_200, dataSource1.getId()); + SearchResultsDTO results = viewsDAO.getFilesBySize(param, 0, null); assertEquals(2, results.getTotalResultsCount()); assertEquals(2, results.getItems().size()); // Get "200MB - 1GB" files from data source 1 - param = new FileTypeSizeSearchParams(FileTypeSizeSearchParams.FileSizeFilter.SIZE_200_1000, dataSource1.getId()); - results = viewsDAO.getFilesBySize(param, 0, null, false); + param = new FileTypeSizeSearchParams(FileSizeFilter.SIZE_200_1000, dataSource1.getId()); + results = viewsDAO.getFilesBySize(param, 0, null); assertEquals(0, results.getTotalResultsCount()); assertEquals(0, results.getItems().size()); // Get "200MB - 1GB" files from data source 2 - param = new FileTypeSizeSearchParams(FileTypeSizeSearchParams.FileSizeFilter.SIZE_200_1000, dataSource2.getId()); - 
results = viewsDAO.getFilesBySize(param, 0, null, false); + param = new FileTypeSizeSearchParams(FileSizeFilter.SIZE_200_1000, dataSource2.getId()); + results = viewsDAO.getFilesBySize(param, 0, null); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); // Get "1GB+" files from all data sources - param = new FileTypeSizeSearchParams(FileTypeSizeSearchParams.FileSizeFilter.SIZE_1000_, null); - results = viewsDAO.getFilesBySize(param, 0, null, false); + param = new FileTypeSizeSearchParams(FileSizeFilter.SIZE_1000_, null); + results = viewsDAO.getFilesBySize(param, 0, null); assertEquals(0, results.getTotalResultsCount()); assertEquals(0, results.getItems().size()); // Get "50 - 200MB" files from all data sources - param = new FileTypeSizeSearchParams(FileTypeSizeSearchParams.FileSizeFilter.SIZE_50_200, null); - results = viewsDAO.getFilesBySize(param, 0, null, false); + param = new FileTypeSizeSearchParams(FileSizeFilter.SIZE_50_200, null); + results = viewsDAO.getFilesBySize(param, 0, null); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); } catch (ExecutionException ex) { @@ -804,25 +804,25 @@ public class TableSearchTest extends NbTestCase { // Get "Tag1" file tags from data source 1 TagsSearchParams param = new TagsSearchParams(knownTag1, TagsSearchParams.TagType.FILE, dataSource1.getId()); - SearchResultsDTO results = tagsDAO.getTags(param, 0, null, false); + SearchResultsDTO results = tagsDAO.getTags(param, 0, null); assertEquals(0, results.getTotalResultsCount()); assertEquals(0, results.getItems().size()); // Get "Tag2" file tags from data source 1 param = new TagsSearchParams(tag2, TagsSearchParams.TagType.FILE, dataSource1.getId()); - results = tagsDAO.getTags(param, 0, null, false); + results = tagsDAO.getTags(param, 0, null); assertEquals(2, results.getTotalResultsCount()); assertEquals(2, results.getItems().size()); // Get "Tag2" file tags from all data sources param = new TagsSearchParams(tag2, TagsSearchParams.TagType.FILE, null); - results = tagsDAO.getTags(param, 0, null, false); + results = tagsDAO.getTags(param, 0, null); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); // Get "Tag1" file tags from data source 2 param = new TagsSearchParams(knownTag1, TagsSearchParams.TagType.FILE, dataSource2.getId()); - results = tagsDAO.getTags(param, 0, null, false); + results = tagsDAO.getTags(param, 0, null); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); @@ -846,19 +846,19 @@ public class TableSearchTest extends NbTestCase { // Get "Tag1" result tags from data source 2 param = new TagsSearchParams(knownTag1, TagsSearchParams.TagType.RESULT, dataSource2.getId()); - results = tagsDAO.getTags(param, 0, null, false); + results = tagsDAO.getTags(param, 0, null); assertEquals(0, results.getTotalResultsCount()); assertEquals(0, results.getItems().size()); // Get "Tag2" result tags from data source 1 param = new TagsSearchParams(tag2, TagsSearchParams.TagType.RESULT, dataSource1.getId()); - results = tagsDAO.getTags(param, 0, null, false); + results = tagsDAO.getTags(param, 0, null); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); // Get "Tag1" result tags from data source 1 param = new TagsSearchParams(knownTag1, TagsSearchParams.TagType.RESULT, dataSource1.getId()); - results = tagsDAO.getTags(param, 0, null, false); + results = tagsDAO.getTags(param, 0, null); 
assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); @@ -885,19 +885,19 @@ public class TableSearchTest extends NbTestCase { // Get OS Accounts from data source 1 OsAccountsSearchParams param = new OsAccountsSearchParams(dataSource1.getId()); - SearchResultsDTO results = accountsDAO.getAccounts(param, 0, null, false); + SearchResultsDTO results = accountsDAO.getAccounts(param, 0, null); assertEquals(2, results.getTotalResultsCount()); assertEquals(2, results.getItems().size()); // Get OS Accounts from all data sources param = new OsAccountsSearchParams(null); - results = accountsDAO.getAccounts(param, 0, null, false); + results = accountsDAO.getAccounts(param, 0, null); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); // Get OS Accounts from data source 1 param = new OsAccountsSearchParams(dataSource2.getId()); - results = accountsDAO.getAccounts(param, 0, null, false); + results = accountsDAO.getAccounts(param, 0, null); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); @@ -930,21 +930,21 @@ public class TableSearchTest extends NbTestCase { AnalysisResultSearchParam param = new AnalysisResultSearchParam(BlackboardArtifact.Type.TSK_ENCRYPTION_DETECTED, null); AnalysisResultDAO analysisResultDAO = MainDAO.getInstance().getAnalysisResultDAO(); - AnalysisResultTableSearchResultsDTO results = analysisResultDAO.getAnalysisResultsForTable(param, 0, null, false); + AnalysisResultTableSearchResultsDTO results = analysisResultDAO.getAnalysisResultsForTable(param, 0, null); assertEquals(BlackboardArtifact.Type.TSK_ENCRYPTION_DETECTED, results.getArtifactType()); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); // Get encryption detected artifacts from data source 2 param = new AnalysisResultSearchParam(BlackboardArtifact.Type.TSK_ENCRYPTION_DETECTED, dataSource2.getId()); - results = analysisResultDAO.getAnalysisResultsForTable(param, 0, null, false); + results = analysisResultDAO.getAnalysisResultsForTable(param, 0, null); assertEquals(BlackboardArtifact.Type.TSK_ENCRYPTION_DETECTED, results.getArtifactType()); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); // Get all custom artifacts param = new AnalysisResultSearchParam(customAnalysisResultType, null); - results = analysisResultDAO.getAnalysisResultsForTable(param, 0, null, false); + results = analysisResultDAO.getAnalysisResultsForTable(param, 0, null); assertEquals(customAnalysisResultType, results.getArtifactType()); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); @@ -979,7 +979,7 @@ public class TableSearchTest extends NbTestCase { // Get the first page param = new AnalysisResultSearchParam(BlackboardArtifact.Type.TSK_YARA_HIT, null); - results = analysisResultDAO.getAnalysisResultsForTable(param, 0, pageSize, false); + results = analysisResultDAO.getAnalysisResultsForTable(param, 0, pageSize); assertEquals(ARTIFACT_COUNT_YARA, results.getTotalResultsCount()); assertEquals(pageSize.longValue(), results.getItems().size()); @@ -995,7 +995,7 @@ public class TableSearchTest extends NbTestCase { // Get the second page param = new AnalysisResultSearchParam(BlackboardArtifact.Type.TSK_YARA_HIT, null); - results = analysisResultDAO.getAnalysisResultsForTable(param, pageSize, pageSize, false); + results = analysisResultDAO.getAnalysisResultsForTable(param, pageSize, pageSize); 
assertEquals(ARTIFACT_COUNT_YARA, results.getTotalResultsCount()); assertEquals(ARTIFACT_COUNT_YARA - pageSize, results.getItems().size()); @@ -1019,13 +1019,13 @@ public class TableSearchTest extends NbTestCase { // Test hash set hits AnalysisResultDAO analysisResultDAO = MainDAO.getInstance().getAnalysisResultDAO(); HashHitSearchParam hashParam = new HashHitSearchParam(null, HASH_SET_1); - AnalysisResultTableSearchResultsDTO results = analysisResultDAO.getAnalysisResultSetHits(hashParam, 0, null, false); + AnalysisResultTableSearchResultsDTO results = analysisResultDAO.getAnalysisResultSetHits(hashParam, 0, null); assertEquals(BlackboardArtifact.Type.TSK_HASHSET_HIT, results.getArtifactType()); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); hashParam = new HashHitSearchParam(dataSource2.getId(), HASH_SET_1); - results = analysisResultDAO.getAnalysisResultSetHits(hashParam, 0, null, false); + results = analysisResultDAO.getAnalysisResultSetHits(hashParam, 0, null); assertEquals(BlackboardArtifact.Type.TSK_HASHSET_HIT, results.getArtifactType()); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); @@ -1058,13 +1058,13 @@ public class TableSearchTest extends NbTestCase { // Test keyword set hits AnalysisResultDAO analysisResultDAO = MainDAO.getInstance().getAnalysisResultDAO(); KeywordHitSearchParam kwParam = new KeywordHitSearchParam(null, KEYWORD_SET_1, "", "", TskData.KeywordSearchQueryType.LITERAL); - AnalysisResultTableSearchResultsDTO results = analysisResultDAO.getKeywordHitsForTable(kwParam, 0, null, false); + AnalysisResultTableSearchResultsDTO results = analysisResultDAO.getKeywordHitsForTable(kwParam, 0, null); assertEquals(BlackboardArtifact.Type.TSK_KEYWORD_HIT, results.getArtifactType()); assertEquals(2, results.getTotalResultsCount()); assertEquals(2, results.getItems().size()); kwParam = new KeywordHitSearchParam(dataSource2.getId(), KEYWORD_SET_1, "", "", TskData.KeywordSearchQueryType.LITERAL); - results = analysisResultDAO.getKeywordHitsForTable(kwParam, 0, null, false); + results = analysisResultDAO.getKeywordHitsForTable(kwParam, 0, null); assertEquals(BlackboardArtifact.Type.TSK_KEYWORD_HIT, results.getArtifactType()); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); @@ -1102,43 +1102,43 @@ public class TableSearchTest extends NbTestCase { // Get all text documents from data source 1 FileTypeExtensionsSearchParams param = new FileTypeExtensionsSearchParams(FileExtRootFilter.TSK_DOCUMENT_FILTER, dataSource1.getId()); - SearchResultsDTO results = viewsDAO.getFilesByExtension(param, 0, null, false); + SearchResultsDTO results = viewsDAO.getFilesByExtension(param, 0, null); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); // Get Word documents from data source 1 param = new FileTypeExtensionsSearchParams(FileExtDocumentFilter.AUT_DOC_OFFICE, dataSource1.getId()); - results = viewsDAO.getFilesByExtension(param, 0, null, false); + results = viewsDAO.getFilesByExtension(param, 0, null); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); // Get image/jpeg files from data source 1 param = new FileTypeExtensionsSearchParams(FileExtRootFilter.TSK_IMAGE_FILTER, dataSource1.getId()); - results = viewsDAO.getFilesByExtension(param, 0, null, false); + results = viewsDAO.getFilesByExtension(param, 0, null); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, 
results.getItems().size()); // Get text documents from all data sources param = new FileTypeExtensionsSearchParams(FileExtRootFilter.TSK_DOCUMENT_FILTER, null); - results = viewsDAO.getFilesByExtension(param, 0, null, false); + results = viewsDAO.getFilesByExtension(param, 0, null); assertEquals(4, results.getTotalResultsCount()); assertEquals(4, results.getItems().size()); // Get jpeg files from data source 2 param = new FileTypeExtensionsSearchParams(FileExtRootFilter.TSK_IMAGE_FILTER, dataSource2.getId()); - results = viewsDAO.getFilesByExtension(param, 0, null, false); + results = viewsDAO.getFilesByExtension(param, 0, null); assertEquals(0, results.getTotalResultsCount()); assertEquals(0, results.getItems().size()); // Search for file extensions that should produce no results param = new FileTypeExtensionsSearchParams(CustomRootFilter.EMPTY_RESULT_SET_FILTER, null); - results = viewsDAO.getFilesByExtension(param, 0, null, false); + results = viewsDAO.getFilesByExtension(param, 0, null); assertEquals(0, results.getTotalResultsCount()); assertEquals(0, results.getItems().size()); // Get the custom file by extension param = new FileTypeExtensionsSearchParams(CustomRootFilter.CUSTOM_FILTER, null); - results = viewsDAO.getFilesByExtension(param, 0, null, false); + results = viewsDAO.getFilesByExtension(param, 0, null); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); @@ -1163,13 +1163,13 @@ public class TableSearchTest extends NbTestCase { // There are 4 hosts not associated with a person FileSystemPersonSearchParam personParam = new FileSystemPersonSearchParam(null); - BaseSearchResultsDTO results = fileSystemDAO.getHostsForTable(personParam, 0, null, false); + BaseSearchResultsDTO results = fileSystemDAO.getHostsForTable(personParam, 0, null); assertEquals(4, results.getTotalResultsCount()); assertEquals(4, results.getItems().size()); // Person1 is associated with two hosts personParam = new FileSystemPersonSearchParam(person1.getPersonId()); - results = fileSystemDAO.getHostsForTable(personParam, 0, null, false); + results = fileSystemDAO.getHostsForTable(personParam, 0, null); assertEquals(2, results.getTotalResultsCount()); assertEquals(2, results.getItems().size()); @@ -1179,43 +1179,43 @@ public class TableSearchTest extends NbTestCase { // HostA is associated with two images FileSystemHostSearchParam hostParam = new FileSystemHostSearchParam(fsTestHostA.getHostId()); - results = fileSystemDAO.getContentForTable(hostParam, 0, null, false); + results = fileSystemDAO.getContentForTable(hostParam, 0, null); assertEquals(2, results.getTotalResultsCount()); assertEquals(2, results.getItems().size()); // ImageA has one volume system child, which has three volumes that will be displayed FileSystemContentSearchParam param = new FileSystemContentSearchParam(fsTestImageA.getId()); - results = fileSystemDAO.getContentForTable(param, 0, null, false); + results = fileSystemDAO.getContentForTable(param, 0, null); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); // VsA has three volume children (this should match the previous search) param = new FileSystemContentSearchParam(fsTestVsA.getId()); - results = fileSystemDAO.getContentForTable(param, 0, null, false); + results = fileSystemDAO.getContentForTable(param, 0, null); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); // VolumeA1 has a file system child, which in turn has a root directory child with three file 
children param = new FileSystemContentSearchParam(fsTestVolumeA1.getId()); - results = fileSystemDAO.getContentForTable(param, 0, null, false); + results = fileSystemDAO.getContentForTable(param, 0, null); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); // FsA has a root directory child with three file children (this should match the previous search) param = new FileSystemContentSearchParam(fsTestFsA.getId()); - results = fileSystemDAO.getContentForTable(param, 0, null, false); + results = fileSystemDAO.getContentForTable(param, 0, null); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); // The root dir contains three files param = new FileSystemContentSearchParam(fsTestRootDirA.getId()); - results = fileSystemDAO.getContentForTable(param, 0, null, false); + results = fileSystemDAO.getContentForTable(param, 0, null); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); // ImageB has VS (which will display one volume), pool, and one local file children param = new FileSystemContentSearchParam(fsTestImageB.getId()); - results = fileSystemDAO.getContentForTable(param, 0, null, false); + results = fileSystemDAO.getContentForTable(param, 0, null); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); @@ -1226,13 +1226,13 @@ public class TableSearchTest extends NbTestCase { // fsTestVolumeB1 has pool and one local file children param = new FileSystemContentSearchParam(fsTestVolumeB1.getId()); - results = fileSystemDAO.getContentForTable(param, 0, null, false); + results = fileSystemDAO.getContentForTable(param, 0, null); assertEquals(2, results.getTotalResultsCount()); assertEquals(2, results.getItems().size()); // fsTestPoolB has VS (which will display one volume) and local file children param = new FileSystemContentSearchParam(fsTestPoolB.getId()); - results = fileSystemDAO.getContentForTable(param, 0, null, false); + results = fileSystemDAO.getContentForTable(param, 0, null); assertEquals(2, results.getTotalResultsCount()); assertEquals(2, results.getItems().size()); diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/configuration/Bundle.properties-MERGED b/Experimental/src/org/sleuthkit/autopsy/experimental/configuration/Bundle.properties-MERGED index 86fd175181..854c57bed1 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/configuration/Bundle.properties-MERGED +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/configuration/Bundle.properties-MERGED @@ -65,15 +65,19 @@ DayOfTheWeekRenderer_Tuesday_Label=Tuesday DayOfTheWeekRenderer_Wednesday_Label=Wednesday GeneralOptionsPanelController.moduleErr.msg=A module caused an error listening to GeneralOptionsPanelController updates. See log to determine which module. Some data could be incomplete. 
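(The Bundle.properties-MERGED hunk continues below.) The "# {0} - errorMessage" style comment lines being added are the NetBeans convention for documenting what each placeholder in the following bundle message receives; at runtime the values are substituted with java.text.MessageFormat. A small sketch, where the pattern is copied from one of the entries below and the argument is a made-up service name:

    import java.text.MessageFormat;

    public class BundleMessageDemo {

        public static void main(String[] args) {
            // {0} - serviceName
            String pattern = "Multi User service is down: {0}";
            System.out.println(MessageFormat.format(pattern, "keyword search service"));
            // prints: Multi User service is down: keyword search service
        }
    }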
GeneralOptionsPanelController.moduleErr=Module Error +# {0} - errorMessage MultiUserTestTool.criticalError=Critical error running data source processor on test data source: {0} MultiUserTestTool.errorStartingIngestJob=Ingest manager error while starting ingest job +# {0} - cancellationReason MultiUserTestTool.ingestCancelled=Ingest cancelled due to {0} MultiUserTestTool.ingestSettingsError=Failed to analyze data source due to ingest settings errors MultiUserTestTool.noContent=Test data source failed to produce content +# {0} - serviceName MultiUserTestTool.serviceDown=Multi User service is down: {0} MultiUserTestTool.startupError=Failed to analyze data source due to ingest job startup error MultiUserTestTool.unableAddFileAsDataSource=Unable to add test file as data source to case MultiUserTestTool.unableCreatFile=Unable to create a file in case output directory +# {0} - serviceName MultiUserTestTool.unableToCheckService=Unable to check Multi User service state: {0} MultiUserTestTool.unableToCreateCase=Unable to create case MultiUserTestTool.unableToInitializeDatabase=Case database was not successfully initialized diff --git a/InternalPythonModules/GPX_Module/GPX_Parser_Module.py b/InternalPythonModules/GPX_Module/GPX_Parser_Module.py index 375652b6c4..a1deaa447e 100644 --- a/InternalPythonModules/GPX_Module/GPX_Parser_Module.py +++ b/InternalPythonModules/GPX_Module/GPX_Parser_Module.py @@ -134,7 +134,7 @@ class GPXParserFileIngestModule(FileIngestModule): # Create a GeoArtifactsHelper for this file. geoArtifactHelper = GeoArtifactsHelper( - self.skCase, self.moduleName, None, file, context.getJobId()) + self.skCase, self.moduleName, None, file, self.context.getJobId()) if self.writeDebugMsgs: self.log(Level.INFO, "Processing " + file.getUniquePath() + @@ -213,7 +213,7 @@ class GPXParserFileIngestModule(FileIngestModule): art = file.newDataArtifact(BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_BOOKMARK), attributes) - self.blackboard.postArtifact(art, self.moduleName, context.getJobId()) + self.blackboard.postArtifact(art, self.moduleName, self.context.getJobId()) except Blackboard.BlackboardException as e: self.log(Level.SEVERE, "Error posting GPS bookmark artifact for " + diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AdHocSearchChildFactory.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AdHocSearchChildFactory.java index 4dfc014598..c01a81ff66 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AdHocSearchChildFactory.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AdHocSearchChildFactory.java @@ -31,15 +31,18 @@ import java.util.concurrent.ExecutionException; import java.util.logging.Level; import java.util.stream.Collectors; import java.util.stream.Stream; +import javax.swing.SwingUtilities; import javax.swing.SwingWorker; import org.netbeans.api.progress.ProgressHandle; import org.openide.nodes.ChildFactory; import org.openide.nodes.Children; import org.openide.nodes.Node; +import org.openide.util.Cancellable; import org.openide.util.NbBundle; import org.openide.util.lookup.Lookups; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; +import org.sleuthkit.autopsy.core.RuntimeProperties; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; import org.sleuthkit.autopsy.datamodel.AbstractAbstractFileNode; @@ -89,7 +92,8 @@ class AdHocSearchChildFactory extends ChildFactory { * Constructor * * 
@param queryRequests Query results - * @param saveResults Flag whether to save search results as KWS artifacts. + * @param saveResults Flag whether to save search results as KWS + * artifacts. */ AdHocSearchChildFactory(Collection queryRequests, boolean saveResults) { this.queryRequests = queryRequests; @@ -129,7 +133,7 @@ class AdHocSearchChildFactory extends ChildFactory { createFlatKeys(queryRequest.getQuery(), toPopulate); } - + // If there were no hits, make a single Node that will display that // no results were found. if (toPopulate.isEmpty()) { @@ -176,7 +180,7 @@ class AdHocSearchChildFactory extends ChildFactory { * Get file properties. */ Map properties = new LinkedHashMap<>(); - + /** * Add a snippet property, if available. */ @@ -204,7 +208,6 @@ class AdHocSearchChildFactory extends ChildFactory { properties.put(LOCATION.toString(), contentName); } - String hitName; BlackboardArtifact artifact = null; if (hit.isArtifactHit()) { @@ -414,21 +417,35 @@ class AdHocSearchChildFactory extends ChildFactory { this.saveResults = saveResults; } - protected void finalizeWorker() { - deregisterWriter(this); - EventQueue.invokeLater(progress::finish); - } - @Override protected Void doInBackground() throws Exception { - registerWriter(this); //register (synchronized on class) outside of writerLock to prevent deadlock - final String queryStr = query.getQueryString(); - final String queryDisp = queryStr.length() > QUERY_DISPLAY_LEN ? queryStr.substring(0, QUERY_DISPLAY_LEN - 1) + " ..." : queryStr; try { - progress = ProgressHandle.createHandle(NbBundle.getMessage(this.getClass(), "KeywordSearchResultFactory.progress.saving", queryDisp), () -> BlackboardResultWriter.this.cancel(true)); - hits.process(progress, null, this, false, saveResults, null); + if (RuntimeProperties.runningWithGUI()) { + final String queryStr = query.getQueryString(); + final String queryDisp = queryStr.length() > QUERY_DISPLAY_LEN ? queryStr.substring(0, QUERY_DISPLAY_LEN - 1) + " ..." 
: queryStr; + SwingUtilities.invokeLater(() -> { + progress = ProgressHandle.createHandle( + NbBundle.getMessage(this.getClass(), "KeywordSearchResultFactory.progress.saving", queryDisp), + new Cancellable() { + @Override + public boolean cancel() { + //progress.setDisplayName(displayName + " " + NbBundle.getMessage(this.getClass(), "SearchRunner.doInBackGround.cancelMsg")); + logger.log(Level.INFO, "Ad hoc search cancelled by user"); //NON-NLS + new Thread(() -> { + BlackboardResultWriter.this.cancel(true); + }).start(); + return true; + } + }); + }); + } + registerWriter(this); //register (synchronized on class) outside of writerLock to prevent deadlock + hits.process(this, false, saveResults, null); } finally { - finalizeWorker(); + deregisterWriter(this); + if (RuntimeProperties.runningWithGUI() && progress != null) { + EventQueue.invokeLater(progress::finish); + } } return null; } diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java old mode 100644 new mode 100755 index 383abbd3af..bb2fbe189a --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java @@ -38,15 +38,15 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.logging.Level; import javax.swing.SwingUtilities; import javax.swing.SwingWorker; -import org.netbeans.api.progress.aggregate.AggregateProgressFactory; -import org.netbeans.api.progress.aggregate.AggregateProgressHandle; -import org.netbeans.api.progress.aggregate.ProgressContributor; +import org.netbeans.api.progress.ProgressHandle; import org.openide.util.Cancellable; import org.openide.util.NbBundle; import org.openide.util.NbBundle.Messages; +import org.sleuthkit.autopsy.core.RuntimeProperties; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; import org.sleuthkit.autopsy.coreutils.StopWatch; +import org.sleuthkit.autopsy.coreutils.ThreadConfined; import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestMessage; import org.sleuthkit.autopsy.ingest.IngestServices; @@ -248,7 +248,8 @@ final class IngestSearchRunner { } /** - * Task to perform periodic searches for each job (does a single index commit first) + * Task to perform periodic searches for each job (does a single index + * commit first) */ private final class PeriodicSearchTask implements Runnable { @@ -296,24 +297,23 @@ final class IngestSearchRunner { NbBundle.getMessage(this.getClass(), "SearchRunner.Searcher.done.err.msg"), ex.getMessage())); }// catch and ignore if we were cancelled - catch (java.util.concurrent.CancellationException ex) { + catch (java.util.concurrent.CancellationException ex) { } } } stopWatch.stop(); logger.log(Level.INFO, "All periodic searches cumulatively took {0} secs", stopWatch.getElapsedTimeSecs()); //NON-NLS - + // calculate "hold off" time recalculateUpdateIntervalTime(stopWatch.getElapsedTimeSecs()); // ELDEBUG - + // schedule next PeriodicSearchTask jobProcessingTaskFuture = jobProcessingExecutor.schedule(new PeriodicSearchTask(), currentUpdateIntervalMs, MILLISECONDS); - + // exit this thread return; } - - + private void recalculateUpdateIntervalTime(long lastSerchTimeSec) { // If periodic search takes more than 1/4 of the current periodic search interval, then double the search interval if (lastSerchTimeSec * 1000 < currentUpdateIntervalMs / 
4) { @@ -321,7 +321,7 @@ final class IngestSearchRunner { } // double the search interval currentUpdateIntervalMs = currentUpdateIntervalMs * 2; - logger.log(Level.WARNING, "Last periodic search took {0} sec. Increasing search interval to {1} sec", new Object[]{lastSerchTimeSec, currentUpdateIntervalMs/1000}); + logger.log(Level.WARNING, "Last periodic search took {0} sec. Increasing search interval to {1} sec", new Object[]{lastSerchTimeSec, currentUpdateIntervalMs / 1000}); return; } } @@ -447,13 +447,14 @@ final class IngestSearchRunner { /** * Searcher has private copies/snapshots of the lists and keywords */ - private SearchJobInfo job; - private List keywords; //keywords to search - private List keywordListNames; // lists currently being searched - private List keywordLists; - private Map keywordToList; //keyword to list name mapping - private AggregateProgressHandle progressGroup; - private final Logger logger = Logger.getLogger(IngestSearchRunner.Searcher.class.getName()); + private final SearchJobInfo job; + private final List keywords; //keywords to search + private final List keywordListNames; // lists currently being searched + private final List keywordLists; + private final Map keywordToList; //keyword to list name mapping + private final boolean usingNetBeansGUI; + @ThreadConfined(type = ThreadConfined.ThreadType.AWT) + private ProgressHandle progressIndicator; private boolean finalRun = false; Searcher(SearchJobInfo job) { @@ -463,6 +464,7 @@ final class IngestSearchRunner { keywordToList = new HashMap<>(); keywordLists = new ArrayList<>(); //keywords are populated as searcher runs + usingNetBeansGUI = RuntimeProperties.runningWithGUI(); } Searcher(SearchJobInfo job, boolean finalRun) { @@ -473,76 +475,86 @@ final class IngestSearchRunner { @Override @Messages("SearchRunner.query.exception.msg=Error performing query:") protected Object doInBackground() throws Exception { - final String displayName = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.displayName") - + (finalRun ? 
(" - " + NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.finalizeMsg")) : ""); - final String pgDisplayName = displayName + (" (" + NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.pendingMsg") + ")"); - progressGroup = AggregateProgressFactory.createSystemHandle(pgDisplayName, null, new Cancellable() { - @Override - public boolean cancel() { - logger.log(Level.INFO, "Cancelling the searcher by user."); //NON-NLS - if (progressGroup != null) { - progressGroup.setDisplayName(displayName + " " + NbBundle.getMessage(this.getClass(), "SearchRunner.doInBackGround.cancelMsg")); - } - progressGroup.finish(); - return IngestSearchRunner.Searcher.this.cancel(true); - } - }, null); - - updateKeywords(); - - ProgressContributor[] subProgresses = new ProgressContributor[keywords.size()]; - int i = 0; - for (Keyword keywordQuery : keywords) { - subProgresses[i] = AggregateProgressFactory.createProgressContributor(keywordQuery.getSearchTerm()); - progressGroup.addContributor(subProgresses[i]); - i++; - } - - progressGroup.start(); - final StopWatch stopWatch = new StopWatch(); stopWatch.start(); try { - progressGroup.setDisplayName(displayName); - - int keywordsSearched = 0; + if (usingNetBeansGUI) { + /* + * If running in the NetBeans thick client application + * version of Autopsy, NetBeans progress handles (i.e., + * progress bars) are used to display search progress in the + * lower right hand corner of the main application window. + * + * A layer of abstraction to allow alternate representations + * of progress could be used here, as it is in other places + * in the application (see implementations and usage of + * org.sleuthkit.autopsy.progress.ProgressIndicator + * interface), to better decouple keyword search from the + * application's presentation layer. + */ + SwingUtilities.invokeAndWait(() -> { + final String displayName = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.displayName") + + (finalRun ? 
(" - " + NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.finalizeMsg")) : ""); + progressIndicator = ProgressHandle.createHandle(displayName, new Cancellable() { + @Override + public boolean cancel() { + progressIndicator.setDisplayName(displayName + " " + NbBundle.getMessage(this.getClass(), "SearchRunner.doInBackGround.cancelMsg")); + logger.log(Level.INFO, "Search cancelled by user"); //NON-NLS + new Thread(() -> { + IngestSearchRunner.Searcher.this.cancel(true); + }).start(); + return true; + } + }); + progressIndicator.start(); + progressIndicator.switchToIndeterminate(); + }); + } + updateKeywords(); for (Keyword keyword : keywords) { - if (this.isCancelled() || this.job.getJobContext().fileIngestIsCancelled()) { - logger.log(Level.INFO, "Cancel detected, bailing before new keyword processed: {0}", keyword.getSearchTerm()); //NON-NLS + if (isCancelled() || job.getJobContext().fileIngestIsCancelled()) { + logger.log(Level.INFO, "Cancellation requested, exiting before new keyword processed: {0}", keyword.getSearchTerm()); //NON-NLS return null; } - final KeywordList keywordList = keywordToList.get(keyword); - - //new subProgress will be active after the initial query - //when we know number of hits to start() with - if (keywordsSearched > 0) { - subProgresses[keywordsSearched - 1].finish(); + KeywordList keywordList = keywordToList.get(keyword); + if (usingNetBeansGUI) { + String searchTermStr = keyword.getSearchTerm(); + if (searchTermStr.length() > 50) { + searchTermStr = searchTermStr.substring(0, 49) + "..."; + } + final String progressMessage = keywordList.getName() + ": " + searchTermStr; + SwingUtilities.invokeLater(() -> { + progressIndicator.progress(progressMessage); + }); } - KeywordSearchQuery keywordSearchQuery = KeywordSearchUtil.getQueryForKeyword(keyword, keywordList); - // Filtering //limit search to currently ingested data sources //set up a filter with 1 or more image ids OR'ed - final KeywordQueryFilter dataSourceFilter = new KeywordQueryFilter(KeywordQueryFilter.FilterType.DATA_SOURCE, job.getDataSourceId()); + KeywordSearchQuery keywordSearchQuery = KeywordSearchUtil.getQueryForKeyword(keyword, keywordList); + KeywordQueryFilter dataSourceFilter = new KeywordQueryFilter(KeywordQueryFilter.FilterType.DATA_SOURCE, job.getDataSourceId()); keywordSearchQuery.addFilter(dataSourceFilter); - QueryResults queryResults; - // Do the actual search + QueryResults queryResults; try { queryResults = keywordSearchQuery.performQuery(); } catch (KeywordSearchModuleException | NoOpenCoreException ex) { logger.log(Level.SEVERE, "Error performing query: " + keyword.getSearchTerm(), ex); //NON-NLS - MessageNotifyUtil.Notify.error(Bundle.SearchRunner_query_exception_msg() + keyword.getSearchTerm(), ex.getCause().getMessage()); + if (usingNetBeansGUI) { + final String userMessage = Bundle.SearchRunner_query_exception_msg() + keyword.getSearchTerm(); + SwingUtilities.invokeLater(() -> { + MessageNotifyUtil.Notify.error(userMessage, ex.getCause().getMessage()); + }); + } //no reason to continue with next query if recovery failed //or wait for recovery to kick in and run again later //likely case has closed and threads are being interrupted return null; } catch (CancellationException e) { - logger.log(Level.INFO, "Cancel detected, bailing during keyword query: {0}", keyword.getSearchTerm()); //NON-NLS + logger.log(Level.INFO, "Cancellation requested, exiting during keyword query: {0}", keyword.getSearchTerm()); //NON-NLS return null; } @@ -551,42 +563,26 @@ 
final class IngestSearchRunner { QueryResults newResults = filterResults(queryResults); if (!newResults.getKeywords().isEmpty()) { - - // Write results to BB - //scale progress bar more more granular, per result sub-progress, within per keyword - int totalUnits = newResults.getKeywords().size(); - subProgresses[keywordsSearched].start(totalUnits); - int unitProgress = 0; - String queryDisplayStr = keyword.getSearchTerm(); - if (queryDisplayStr.length() > 50) { - queryDisplayStr = queryDisplayStr.substring(0, 49) + "..."; - } - subProgresses[keywordsSearched].progress(keywordList.getName() + ": " + queryDisplayStr, unitProgress); - // Create blackboard artifacts - newResults.process(null, subProgresses[keywordsSearched], this, keywordList.getIngestMessages(), true, job.getJobId()); - - } //if has results - - //reset the status text before it goes away - subProgresses[keywordsSearched].progress(""); - - ++keywordsSearched; - - } //for each keyword - - } //end try block - catch (Exception ex) { - logger.log(Level.WARNING, "searcher exception occurred", ex); //NON-NLS - } finally { - try { - finalizeSearcher(); - stopWatch.stop(); - logger.log(Level.INFO, "Searcher took {0} secs to run (final = {1})", new Object[]{stopWatch.getElapsedTimeSecs(), this.finalRun}); //NON-NLS - } finally { - // In case a thread is waiting on this worker to be done - job.searchNotify(); + newResults.process(this, keywordList.getIngestMessages(), true, job.getJobId()); + } } + } catch (Exception ex) { + logger.log(Level.WARNING, "Error occurred during keyword search", ex); //NON-NLS + } finally { + if (progressIndicator != null) { + SwingUtilities.invokeLater(new Runnable() { + @Override + public void run() { + progressIndicator.finish(); + progressIndicator = null; + } + }); + } + stopWatch.stop(); + logger.log(Level.INFO, "Searcher took {0} secs to run (final = {1})", new Object[]{stopWatch.getElapsedTimeSecs(), this.finalRun}); //NON-NLS + // In case a thread is waiting on this worker to be done + job.searchNotify(); } return null; @@ -612,20 +608,6 @@ final class IngestSearchRunner { } } - /** - * Performs the cleanup that needs to be done right AFTER - * doInBackground() returns without relying on done() method that is not - * guaranteed to run. 
- */ - private void finalizeSearcher() { - SwingUtilities.invokeLater(new Runnable() { - @Override - public void run() { - progressGroup.finish(); - } - }); - } - /** * This method filters out all of the hits found in earlier periodic * searches and returns only the results found by the most recent diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java index 9806c81f2c..de1f6329a3 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java @@ -25,6 +25,7 @@ import java.util.List; import java.util.Map; import java.util.Set; import java.util.logging.Level; +import javax.swing.SwingUtilities; import javax.swing.SwingWorker; import org.apache.commons.lang.StringUtils; import org.netbeans.api.progress.ProgressHandle; @@ -32,6 +33,7 @@ import org.netbeans.api.progress.aggregate.ProgressContributor; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; +import org.sleuthkit.autopsy.core.RuntimeProperties; import org.sleuthkit.autopsy.coreutils.EscapeUtil; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.IngestMessage; @@ -50,6 +52,8 @@ import org.sleuthkit.datamodel.TskCoreException; * about the search hits to the ingest inbox, and publishing an event to notify * subscribers of the blackboard posts. */ + + class QueryResults { private static final Logger logger = Logger.getLogger(QueryResults.class.getName()); @@ -130,10 +134,6 @@ class QueryResults { * All calls to the addResult method MUST be completed before calling this * method. * - * @param progress A progress indicator that reports the number of - * keywords processed. Can be null. - * @param subProgress A progress contributor that reports the keyword - * currently being processed. Can be null. * @param worker The SwingWorker that is being used to do the * processing, will be checked for task cancellation * before processing each keyword. @@ -144,19 +144,7 @@ class QueryResults { * @param ingestJobId The numeric identifier of the ingest job within which * the artifacts are being created, may be null. */ - void process(ProgressHandle progress, ProgressContributor subProgress, SwingWorker worker, boolean notifyInbox, boolean saveResults, Long ingestJobId) { - /* - * Initialize the progress indicator to the number of keywords that will - * be processed. - */ - if (null != progress) { - progress.start(getKeywords().size()); - } - - /* - * Process the keyword hits for each keyword. - */ - int keywordsProcessed = 0; + void process(SwingWorker worker, boolean notifyInbox, boolean saveResults, Long ingestJobId) { final Collection hitArtifacts = new ArrayList<>(); for (final Keyword keyword : getKeywords()) { /* @@ -164,22 +152,7 @@ class QueryResults { */ if (worker.isCancelled()) { logger.log(Level.INFO, "Processing cancelled, exiting before processing search term {0}", keyword.getSearchTerm()); //NON-NLS - break; - } - - /* - * Update the progress indicator and the show the current keyword - * via the progress contributor. 
-             * via the progress contributor.
-             */
-            if (progress != null) {
-                progress.progress(keyword.toString(), keywordsProcessed);
-            }
-            if (subProgress != null) {
-                String hitDisplayStr = keyword.getSearchTerm();
-                if (hitDisplayStr.length() > 50) {
-                    hitDisplayStr = hitDisplayStr.substring(0, 49) + "...";
-                }
-                subProgress.progress(query.getKeywordList().getName() + ": " + hitDisplayStr, keywordsProcessed);
+                return;
             }
 
             /*
@@ -201,7 +174,7 @@ class QueryResults {
                     snippet = LuceneQuery.querySnippet(snippetQuery, hit.getSolrObjectId(), hit.getChunkId(), !query.isLiteral(), true);
                 } catch (NoOpenCoreException e) {
                     logger.log(Level.SEVERE, "Solr core closed while executing snippet query " + snippetQuery, e); //NON-NLS
-                    break; // Stop processing.
+                    return; // Stop processing.
                 } catch (Exception e) {
                     logger.log(Level.SEVERE, "Error executing snippet query " + snippetQuery, e); //NON-NLS
                     continue; // Try processing the next hit.
@@ -241,8 +214,6 @@ class QueryResults {
                     }
                 }
             }
-
-            ++keywordsProcessed;
         }
 
         /*
@@ -297,69 +268,74 @@ class QueryResults {
      * @throws TskCoreException If there is a problem generating or send the
      *                          inbox message.
      */
-    private void writeSingleFileInboxMessage(BlackboardArtifact artifact, Content hitContent) throws TskCoreException {
-        StringBuilder subjectSb = new StringBuilder(1024);
-        if (!query.isLiteral()) {
-            subjectSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.regExpHitLbl"));
-        } else {
-            subjectSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.kwHitLbl"));
-        }
+    private void writeSingleFileInboxMessage(final BlackboardArtifact artifact, final Content hitContent) throws TskCoreException {
+        if (artifact != null && hitContent != null && RuntimeProperties.runningWithGUI()) {
+            final StringBuilder subjectSb = new StringBuilder(1024);
+            if (!query.isLiteral()) {
+                subjectSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.regExpHitLbl"));
+            } else {
+                subjectSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.kwHitLbl"));
+            }
 
-        StringBuilder detailsSb = new StringBuilder(1024);
-        String uniqueKey = null;
-        BlackboardAttribute attr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD));
-        if (attr != null) {
-            final String keyword = attr.getValueString();
-            subjectSb.append(keyword);
-            uniqueKey = keyword.toLowerCase();
-            detailsSb.append("<table border='0' cellpadding='4' width='280'>"); //NON-NLS
-            detailsSb.append("<tr>"); //NON-NLS
-            detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.kwHitThLbl"));
-            detailsSb.append("<td>").append(EscapeUtil.escapeHtml(keyword)).append("</td>"); //NON-NLS
-            detailsSb.append("</tr>"); //NON-NLS
-        }
+            final StringBuilder detailsSb = new StringBuilder(1024);
+            String uniqueKey = null;
+            BlackboardAttribute attr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD));
+            if (attr != null) {
+                final String keyword = attr.getValueString();
+                subjectSb.append(keyword);
+                uniqueKey = keyword.toLowerCase();
+                detailsSb.append("<table border='0' cellpadding='4' width='280'>"); //NON-NLS
+                detailsSb.append("<tr>"); //NON-NLS
+                detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.kwHitThLbl"));
+                detailsSb.append("<td>").append(EscapeUtil.escapeHtml(keyword)).append("</td>"); //NON-NLS
+                detailsSb.append("</tr>"); //NON-NLS
+            }
 
-        //preview
-        attr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW));
-        if (attr != null) {
-            detailsSb.append("<tr>"); //NON-NLS
-            detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.previewThLbl"));
-            detailsSb.append("<td>").append(EscapeUtil.escapeHtml(attr.getValueString())).append("</td>"); //NON-NLS
-            detailsSb.append("</tr>"); //NON-NLS
-        }
-
-        //file
-        detailsSb.append("<tr>"); //NON-NLS
-        detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.fileThLbl"));
-        if (hitContent instanceof AbstractFile) {
-            AbstractFile hitFile = (AbstractFile) hitContent;
-            detailsSb.append("<td>").append(hitFile.getParentPath()).append(hitFile.getName()).append("</td>"); //NON-NLS
-        } else {
-            detailsSb.append("<td>").append(hitContent.getName()).append("</td>"); //NON-NLS
-        }
-        detailsSb.append("</tr>"); //NON-NLS
-
-        //list
-        attr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME));
-        if (attr != null) {
-            detailsSb.append("<tr>"); //NON-NLS
-            detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.listThLbl"));
-            detailsSb.append("<td>").append(attr.getValueString()).append("</td>"); //NON-NLS
-            detailsSb.append("</tr>"); //NON-NLS
-        }
-
-        //regex
-        if (!query.isLiteral()) {
-            attr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP));
-            if (attr != null) {
-                detailsSb.append("<tr>"); //NON-NLS
-                detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.regExThLbl"));
-                detailsSb.append("<td>").append(attr.getValueString()).append("</td>"); //NON-NLS
-                detailsSb.append("</tr>"); //NON-NLS
-            }
-        }
-        detailsSb.append("</table>"); //NON-NLS
-
-        IngestServices.getInstance().postMessage(IngestMessage.createDataMessage(MODULE_NAME, subjectSb.toString(), detailsSb.toString(), uniqueKey, artifact));
+            //preview
+            attr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW));
+            if (attr != null) {
+                detailsSb.append("<tr>"); //NON-NLS
+                detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.previewThLbl"));
+                detailsSb.append("<td>").append(EscapeUtil.escapeHtml(attr.getValueString())).append("</td>"); //NON-NLS
+                detailsSb.append("</tr>"); //NON-NLS
+            }
+
+            //file
+            detailsSb.append("<tr>"); //NON-NLS
+            detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.fileThLbl"));
+            if (hitContent instanceof AbstractFile) {
+                AbstractFile hitFile = (AbstractFile) hitContent;
+                detailsSb.append("<td>").append(hitFile.getParentPath()).append(hitFile.getName()).append("</td>"); //NON-NLS
+            } else {
+                detailsSb.append("<td>").append(hitContent.getName()).append("</td>"); //NON-NLS
+            }
+            detailsSb.append("</tr>"); //NON-NLS
+
+            //list
+            attr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME));
+            if (attr != null) {
+                detailsSb.append("<tr>"); //NON-NLS
+                detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.listThLbl"));
+                detailsSb.append("<td>").append(attr.getValueString()).append("</td>"); //NON-NLS
+                detailsSb.append("</tr>"); //NON-NLS
+            }
+
+            //regex
+            if (!query.isLiteral()) {
+                attr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP));
+                if (attr != null) {
+                    detailsSb.append("<tr>"); //NON-NLS
+                    detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.regExThLbl"));
+                    detailsSb.append("<td>").append(attr.getValueString()).append("</td>"); //NON-NLS
+                    detailsSb.append("</tr>"); //NON-NLS
+                }
+            }
+            detailsSb.append("</table>"); //NON-NLS
+
+            final String key = uniqueKey; // Might be null, but that's supported.
+            SwingUtilities.invokeLater(() -> {
+                IngestServices.getInstance().postMessage(IngestMessage.createDataMessage(MODULE_NAME, subjectSb.toString(), detailsSb.toString(), key, artifact));
+            });
+        }
     }
 }
diff --git a/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties b/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties
index cd253dc3cb..e9620c3d7a 100644
--- a/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties
+++ b/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties
@@ -1,5 +1,5 @@
 #Updated by build script
-#Thu, 30 Sep 2021 19:36:31 -0400
+#Tue, 30 Nov 2021 17:19:50 -0500
 LBL_splash_window_title=Starting Autopsy
 SPLASH_HEIGHT=314
 SPLASH_WIDTH=538
diff --git a/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties b/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties
index 2d02262803..f591caf623 100644
--- a/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties
+++ b/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties
@@ -1,4 +1,4 @@
 #Updated by build script
-#Thu, 30 Sep 2021 19:36:31 -0400
+#Tue, 30 Nov 2021 17:19:50 -0500
 CTL_MainWindow_Title=Autopsy 4.19.2
 CTL_MainWindow_Title_No_Project=Autopsy 4.19.2