From be57ecd677bf4add0c178f79f7c759e5084c3838 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 26 Oct 2021 15:25:49 -0400 Subject: [PATCH 001/142] 7895 CR data ingest module --- .../eventlisteners/IngestEventsListener.java | 479 +----------------- .../CentralRepoDataArtifactIngestModule.java | 390 +++++++++++++- ....java => CentralRepoFileIngestModule.java} | 39 +- .../CentralRepoIngestModuleFactory.java | 6 +- .../ingestmodule/IngestSettings.java | 8 +- 5 files changed, 385 insertions(+), 537 deletions(-) rename Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/{CentralRepoIngestModule.java => CentralRepoFileIngestModule.java} (90%) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/IngestEventsListener.java b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/IngestEventsListener.java index 68dcb8e456..16a2b459d1 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/IngestEventsListener.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/IngestEventsListener.java @@ -21,75 +21,39 @@ package org.sleuthkit.autopsy.centralrepository.eventlisteners; import com.google.common.util.concurrent.ThreadFactoryBuilder; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; -import static java.lang.Boolean.FALSE; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; import java.util.EnumSet; -import java.util.Iterator; -import java.util.LinkedHashSet; -import java.util.List; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.logging.Level; -import java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import 
org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance; -import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException; -import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeUtil; import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.IngestManager; -import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource; -import org.sleuthkit.datamodel.Blackboard; -import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.autopsy.coreutils.ThreadUtils; -import static org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent.DATA_ADDED; -import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME; -import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_TYPE; -import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_VALUE; -import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_OTHER_CASES; import org.sleuthkit.autopsy.ingest.events.DataSourceAnalysisEvent; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.Image; -import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository; -import org.sleuthkit.datamodel.DataArtifact; -import org.sleuthkit.datamodel.Score; -import org.sleuthkit.datamodel.TskData; /** - * Listen for ingest events and update entries in the Central Repository + * Listen for ingest job events and update entries in the Central Repository * database accordingly */ 
-@NbBundle.Messages({"IngestEventsListener.ingestmodule.name=Central Repository"}) public class IngestEventsListener { private static final Logger LOGGER = Logger.getLogger(CorrelationAttributeInstance.class.getName()); private static final Set INGEST_JOB_EVENTS_OF_INTEREST = EnumSet.of(IngestManager.IngestJobEvent.DATA_SOURCE_ANALYSIS_COMPLETED); - private static final Set INGEST_MODULE_EVENTS_OF_INTEREST = EnumSet.of(DATA_ADDED); - private static final String MODULE_NAME = Bundle.IngestEventsListener_ingestmodule_name(); - private static int correlationModuleInstanceCount; - private static boolean flagNotableItems; - private static boolean flagSeenDevices; - private static boolean createCrProperties; - private static boolean flagUniqueArtifacts; private static final String INGEST_EVENT_THREAD_NAME = "Ingest-Event-Listener-%d"; private final ExecutorService jobProcessingExecutor; - private final PropertyChangeListener pcl1 = new IngestModuleEventListener(); private final PropertyChangeListener pcl2 = new IngestJobEventListener(); - final Collection recentlyAddedCeArtifacts = new LinkedHashSet<>(); - - static final int MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE = 10; - static final int MAX_NUM_PREVIOUS_CASES_FOR_PREV_SEEN_ARTIFACT_CREATION = 20; public IngestEventsListener() { jobProcessingExecutor = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setNameFormat(INGEST_EVENT_THREAD_NAME).build()); @@ -103,7 +67,6 @@ public class IngestEventsListener { * Add all of our Ingest Event Listeners to the IngestManager Instance. */ public void installListeners() { - IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS_OF_INTEREST, pcl1); IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, pcl2); } @@ -111,292 +74,9 @@ public class IngestEventsListener { * Remove all of our Ingest Event Listeners from the IngestManager Instance. 
*/ public void uninstallListeners() { - IngestManager.getInstance().removeIngestModuleEventListener(pcl1); IngestManager.getInstance().removeIngestJobEventListener(pcl2); } - /** - * Increase the number of IngestEventsListeners adding contents to the - * Central Repository. - */ - public synchronized static void incrementCorrelationEngineModuleCount() { - correlationModuleInstanceCount++; //Should be called once in the Central Repository module's startup method. - } - - /** - * Decrease the number of IngestEventsListeners adding contents to the - * Central Repository. - */ - public synchronized static void decrementCorrelationEngineModuleCount() { - if (getCeModuleInstanceCount() > 0) { //prevent it ingestJobCounter from going negative - correlationModuleInstanceCount--; //Should be called once in the Central Repository module's shutdown method. - } - } - - /** - * Reset the counter which keeps track of if the Central Repository Module - * is being run during injest to 0. - */ - synchronized static void resetCeModuleInstanceCount() { - correlationModuleInstanceCount = 0; //called when a case is opened in case for some reason counter was not reset - } - - /** - * Whether or not the Central Repository Module is enabled for any of the - * currently running ingest jobs. - * - * @return boolean True for Central Repository enabled, False for disabled - */ - public synchronized static int getCeModuleInstanceCount() { - return correlationModuleInstanceCount; - } - - /** - * Are notable items being flagged? - * - * @return True if flagging notable items; otherwise false. - */ - public synchronized static boolean isFlagNotableItems() { - return flagNotableItems; - } - - /** - * Are previously seen devices being flagged? - * - * @return True if flagging seen devices; otherwise false. 
- */ - public synchronized static boolean isFlagSeenDevices() { - return flagSeenDevices; - } - - /** - * Are correlation properties being created - * - * @return True if creating correlation properties; otherwise false. - */ - public synchronized static boolean shouldCreateCrProperties() { - return createCrProperties; - } - - /** - * Configure the listener to flag notable items or not. - * - * @param value True to flag notable items; otherwise false. - */ - public synchronized static void setFlagNotableItems(boolean value) { - flagNotableItems = value; - } - - /** - * Configure the listener to flag previously seen devices or not. - * - * @param value True to flag seen devices; otherwise false. - */ - public synchronized static void setFlagSeenDevices(boolean value) { - flagSeenDevices = value; - } - - /** - * Configure the listener to flag unique apps or not. - * - * @param value True to flag unique apps; otherwise false. - */ - public synchronized static void setFlagUniqueArtifacts(boolean value) { - flagUniqueArtifacts = value; - } - - /** - * Are unique apps being flagged? - * - * @return True if flagging unique apps; otherwise false. - */ - public synchronized static boolean isFlagUniqueArtifacts() { - return flagUniqueArtifacts; - } - - /** - * Configure the listener to create correlation properties - * - * @param value True to create properties; otherwise false. - */ - public synchronized static void setCreateCrProperties(boolean value) { - createCrProperties = value; - } - - /** - * Make a "previously seen" artifact based on a new artifact being - * previously seen. - * - * @param originalArtifact Original artifact that we want to flag - * @param caseDisplayNames List of case names artifact was previously seen - * in - * @param aType The correlation type. - * @param value The correlation value. 
- */ - @NbBundle.Messages({"IngestEventsListener.prevTaggedSet.text=Previously Tagged As Notable (Central Repository)", - "IngestEventsListener.prevCaseComment.text=Previous Case: "}) - static private void makeAndPostPreviousNotableArtifact(BlackboardArtifact originalArtifact, List caseDisplayNames, - CorrelationAttributeInstance.Type aType, String value) { - String prevCases = caseDisplayNames.stream().distinct().collect(Collectors.joining(",")); - String justification = "Previously marked as notable in cases " + prevCases; - Collection attributesForNewArtifact = Arrays.asList(new BlackboardAttribute( - TSK_SET_NAME, MODULE_NAME, - Bundle.IngestEventsListener_prevTaggedSet_text()), - new BlackboardAttribute( - TSK_CORRELATION_TYPE, MODULE_NAME, - aType.getDisplayName()), - new BlackboardAttribute( - TSK_CORRELATION_VALUE, MODULE_NAME, - value), - new BlackboardAttribute( - TSK_OTHER_CASES, MODULE_NAME, - prevCases)); - makeAndPostArtifact(BlackboardArtifact.Type.TSK_PREVIOUSLY_NOTABLE, originalArtifact, attributesForNewArtifact, Bundle.IngestEventsListener_prevTaggedSet_text(), - Score.SCORE_NOTABLE, justification); - } - - /** - * Create a "previously seen" hit for a device which was previously seen in - * the central repository. NOTE: Artifacts that are too common will be - * skipped. - * - * @param originalArtifact the artifact to create the "previously seen" item - * for - * @param caseDisplayNames the case names the artifact was previously seen - * in - * @param aType The correlation type. - * @param value The correlation value. 
- */ - @NbBundle.Messages({"IngestEventsListener.prevExists.text=Previously Seen Devices (Central Repository)", - "# {0} - typeName", - "# {1} - count", - "IngestEventsListener.prevCount.text=Number of previous {0}: {1}"}) - static private void makeAndPostPreviousSeenArtifact(BlackboardArtifact originalArtifact, List caseDisplayNames, - CorrelationAttributeInstance.Type aType, String value) { - - // calculate score - Score score; - int numCases = caseDisplayNames.size(); - if (numCases <= MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE) { - score = Score.SCORE_LIKELY_NOTABLE; - } else if (numCases > MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE && numCases <= MAX_NUM_PREVIOUS_CASES_FOR_PREV_SEEN_ARTIFACT_CREATION) { - score = Score.SCORE_NONE; - } else { - // don't make an Analysis Result, the artifact is too common. - return; - } - - String prevCases = caseDisplayNames.stream().distinct().collect(Collectors.joining(",")); - String justification = "Previously seen in cases " + prevCases; - Collection attributesForNewArtifact = Arrays.asList(new BlackboardAttribute( - TSK_SET_NAME, MODULE_NAME, - Bundle.IngestEventsListener_prevExists_text()), - new BlackboardAttribute( - TSK_CORRELATION_TYPE, MODULE_NAME, - aType.getDisplayName()), - new BlackboardAttribute( - TSK_CORRELATION_VALUE, MODULE_NAME, - value), - new BlackboardAttribute( - TSK_OTHER_CASES, MODULE_NAME, - prevCases)); - makeAndPostArtifact(BlackboardArtifact.Type.TSK_PREVIOUSLY_SEEN, originalArtifact, attributesForNewArtifact, Bundle.IngestEventsListener_prevExists_text(), - score, justification); - } - - /** - * Create a "previously unseen" hit for an application which was never seen - * in the central repository. - * - * @param originalArtifact the artifact to create the "previously unseen" - * item for - * @param aType The correlation type. - * @param value The correlation value. 
- */ - static private void makeAndPostPreviouslyUnseenArtifact(BlackboardArtifact originalArtifact, CorrelationAttributeInstance.Type aType, String value) { - Collection attributesForNewArtifact = Arrays.asList( - new BlackboardAttribute( - TSK_CORRELATION_TYPE, MODULE_NAME, - aType.getDisplayName()), - new BlackboardAttribute( - TSK_CORRELATION_VALUE, MODULE_NAME, - value)); - makeAndPostArtifact(BlackboardArtifact.Type.TSK_PREVIOUSLY_UNSEEN, originalArtifact, attributesForNewArtifact, "", - Score.SCORE_LIKELY_NOTABLE, "This application has not been previously seen before"); - } - - /** - * Make an artifact to flag the passed in artifact. - * - * @param newArtifactType Type of artifact to create. - * @param originalArtifact Artifact in current case we want to flag - * @param attributesForNewArtifact Attributes to assign to the new artifact - * @param configuration The configuration to be specified for the - * new artifact hit - * @param score sleuthkit.datamodel.Score to be assigned - * to this artifact - * @param justification Justification string - */ - private static void makeAndPostArtifact(BlackboardArtifact.Type newArtifactType, BlackboardArtifact originalArtifact, Collection attributesForNewArtifact, String configuration, - Score score, String justification) { - try { - SleuthkitCase tskCase = originalArtifact.getSleuthkitCase(); - Blackboard blackboard = tskCase.getBlackboard(); - // Create artifact if it doesn't already exist. 
- BlackboardArtifact.ARTIFACT_TYPE type = BlackboardArtifact.ARTIFACT_TYPE.fromID(newArtifactType.getTypeID()); - if (!blackboard.artifactExists(originalArtifact, type, attributesForNewArtifact)) { - BlackboardArtifact newArtifact = originalArtifact.newAnalysisResult( - newArtifactType, score, - null, configuration, justification, attributesForNewArtifact) - .getAnalysisResult(); - - try { - // index the artifact for keyword search - blackboard.postArtifact(newArtifact, MODULE_NAME); - } catch (Blackboard.BlackboardException ex) { - LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + newArtifact.getArtifactID(), ex); //NON-NLS - } - } - } catch (TskCoreException ex) { - LOGGER.log(Level.SEVERE, "Failed to create BlackboardArtifact.", ex); // NON-NLS - } catch (IllegalStateException ex) { - LOGGER.log(Level.SEVERE, "Failed to create BlackboardAttribute.", ex); // NON-NLS - } - } - - private class IngestModuleEventListener implements PropertyChangeListener { - - @Override - public void propertyChange(PropertyChangeEvent evt) { - //if ingest is running we want there to check if there is a Central Repository module running - //sometimes artifacts are generated by DSPs or other sources while ingest is not running - //in these cases we still want to create correlation attributesForNewArtifact for those artifacts when appropriate - if (!IngestManager.getInstance().isIngestRunning() || getCeModuleInstanceCount() > 0) { - CentralRepository dbManager; - try { - dbManager = CentralRepository.getInstance(); - } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, "Failed to connect to Central Repository database.", ex); - return; - } - switch (IngestManager.IngestModuleEvent.valueOf(evt.getPropertyName())) { - case DATA_ADDED: { - //if ingest isn't running create the "previously seen" items, - // otherwise use the ingest module setting to determine if we create "previously seen" items - boolean flagNotable = !IngestManager.getInstance().isIngestRunning() 
|| isFlagNotableItems(); - boolean flagPrevious = !IngestManager.getInstance().isIngestRunning() || isFlagSeenDevices(); - boolean createAttributes = !IngestManager.getInstance().isIngestRunning() || shouldCreateCrProperties(); - boolean flagUnique = !IngestManager.getInstance().isIngestRunning() || isFlagUniqueArtifacts(); - jobProcessingExecutor.submit(new DataAddedTask(dbManager, evt, flagNotable, flagPrevious, createAttributes, flagUnique)); - break; - } - default: - break; - } - } - } - } - private class IngestJobEventListener implements PropertyChangeListener { @Override @@ -433,12 +113,6 @@ public class IngestEventsListener { @Override public void run() { - // clear the tracker to reduce memory usage - if (getCeModuleInstanceCount() == 0) { - recentlyAddedCeArtifacts.clear(); - } - //else another instance of the Central Repository Module is still being run. - /* * Ensure the data source in the Central Repository has hash values * that match those in the case database. @@ -517,158 +191,7 @@ public class IngestEventsListener { "Unable to fetch data from the case database for data source '%s' (obj_id=%d)", dataSourceName, dataSourceObjectId), ex); } - } // DATA_SOURCE_ANALYSIS_COMPLETED - } - - private final class DataAddedTask implements Runnable { - - private final CentralRepository dbManager; - private final PropertyChangeEvent event; - private final boolean flagNotableItemsEnabled; - private final boolean flagPreviousItemsEnabled; - private final boolean createCorrelationAttributes; - private final boolean flagUniqueItemsEnabled; - - private DataAddedTask(CentralRepository db, PropertyChangeEvent evt, boolean flagNotableItemsEnabled, boolean flagPreviousItemsEnabled, boolean createCorrelationAttributes, boolean flagUnique) { - this.dbManager = db; - this.event = evt; - this.flagNotableItemsEnabled = flagNotableItemsEnabled; - this.flagPreviousItemsEnabled = flagPreviousItemsEnabled; - this.createCorrelationAttributes = createCorrelationAttributes; - 
this.flagUniqueItemsEnabled = flagUnique; - } - - @Override - public void run() { - if (!CentralRepository.isEnabled()) { - return; - } - final ModuleDataEvent mde = (ModuleDataEvent) event.getOldValue(); - Collection bbArtifacts = mde.getArtifacts(); - if (null == bbArtifacts) { //the ModuleDataEvents don't always have a collection of artifacts set - return; - } - List eamArtifacts = new ArrayList<>(); - - for (BlackboardArtifact bbArtifact : bbArtifacts) { - // makeCorrAttrToSave will filter out artifacts which should not be sources of CR data. - List convertedArtifacts = new ArrayList<>(); - if (bbArtifact instanceof DataArtifact) { - convertedArtifacts.addAll(CorrelationAttributeUtil.makeCorrAttrsToSave((DataArtifact) bbArtifact)); - } - for (CorrelationAttributeInstance eamArtifact : convertedArtifacts) { - try { - // Only do something with this artifact if it's unique within the job - if (recentlyAddedCeArtifacts.add(eamArtifact.toString())) { - - // Get a list of instances for a given value (hash, email, etc.) 
- List previousOccurrences = new ArrayList<>(); - // check if we are flagging things - if (flagNotableItemsEnabled || flagPreviousItemsEnabled || flagUniqueItemsEnabled) { - try { - previousOccurrences = dbManager.getArtifactInstancesByTypeValue(eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue()); - - // make sure the previous instances do not contain current case - for (Iterator iterator = previousOccurrences.iterator(); iterator.hasNext();) { - CorrelationAttributeInstance instance = iterator.next(); - if (instance.getCorrelationCase().getCaseUUID().equals(eamArtifact.getCorrelationCase().getCaseUUID())) { - // this is the current case - remove the instace from the previousOccurrences list - iterator.remove(); - } - } - } catch (CorrelationAttributeNormalizationException ex) { - LOGGER.log(Level.INFO, String.format("Unable to flag previously seen device: %s.", eamArtifact.toString()), ex); - } - } - - // Was it previously marked as bad? - // query db for artifact instances having this TYPE/VALUE and knownStatus = "Bad". - // if getKnownStatus() is "Unknown" and this artifact instance was marked bad in a previous case, - // create TSK_PREVIOUSLY_SEEN artifact on BB. 
- if (flagNotableItemsEnabled) { - List caseDisplayNames = getCaseDisplayNamesForNotable(previousOccurrences); - if (!caseDisplayNames.isEmpty()) { - makeAndPostPreviousNotableArtifact(bbArtifact, - caseDisplayNames, eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue()); - - // if we have marked this artifact as notable, then skip the analysis of whether it was previously seen - continue; - } - } - - // flag previously seen devices and communication accounts (emails, phones, etc) - if (flagPreviousItemsEnabled && !previousOccurrences.isEmpty() - && (eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.USBID_TYPE_ID - || eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.ICCID_TYPE_ID - || eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.IMEI_TYPE_ID - || eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.IMSI_TYPE_ID - || eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.MAC_TYPE_ID - || eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.EMAIL_TYPE_ID - || eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.PHONE_TYPE_ID)) { - - List caseDisplayNames = getCaseDisplayNames(previousOccurrences); - makeAndPostPreviousSeenArtifact(bbArtifact, caseDisplayNames, eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue()); - } - - // flag previously unseen apps and domains - if (flagUniqueItemsEnabled - && (eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.INSTALLED_PROGS_TYPE_ID - || eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.DOMAIN_TYPE_ID)) { - - if (previousOccurrences.isEmpty()) { - makeAndPostPreviouslyUnseenArtifact(bbArtifact, eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue()); - } - } - if (createCorrelationAttributes) { - eamArtifacts.add(eamArtifact); - } - } - } catch (CentralRepoException ex) { - 
LOGGER.log(Level.SEVERE, "Error counting notable artifacts.", ex); - } - } - } - if (FALSE == eamArtifacts.isEmpty()) { - for (CorrelationAttributeInstance eamArtifact : eamArtifacts) { - try { - dbManager.addArtifactInstance(eamArtifact); - } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, "Error adding artifact to database.", ex); //NON-NLS - } - } - } // DATA_ADDED } } - /** - * Gets case display names for a list of CorrelationAttributeInstance. - * - * @param occurrences List of CorrelationAttributeInstance - * - * @return List of case display names - */ - private List getCaseDisplayNames(List occurrences) { - List caseNames = new ArrayList<>(); - for (CorrelationAttributeInstance occurrence : occurrences) { - caseNames.add(occurrence.getCorrelationCase().getDisplayName()); - } - return caseNames; - } - - /** - * Gets case display names for only occurrences marked as NOTABLE/BAD. - * - * @param occurrences List of CorrelationAttributeInstance - * - * @return List of case display names of NOTABLE/BAD occurrences - */ - private List getCaseDisplayNamesForNotable(List occurrences) { - List caseNames = new ArrayList<>(); - for (CorrelationAttributeInstance occurrence : occurrences) { - if (occurrence.getKnownStatus() == TskData.FileKnown.BAD) { - caseNames.add(occurrence.getCorrelationCase().getDisplayName()); - } - } - return caseNames; - } } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java index 0ed9f53518..abd9dae8cc 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2021 Basis Technology Corp. + * Copyright 2021-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -18,37 +18,393 @@ */ package org.sleuthkit.autopsy.centralrepository.ingestmodule; -import java.util.concurrent.atomic.AtomicLong; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Iterator; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Set; +import java.util.logging.Level; +import java.util.stream.Collectors; +import org.apache.commons.lang3.StringUtils; +import org.openide.util.NbBundle; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; +import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException; +import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository; +import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance; +import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException; +import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeUtil; +import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase; +import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource; +import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.DataArtifactIngestModule; -import org.sleuthkit.autopsy.ingest.IngestMessage; -import org.sleuthkit.autopsy.ingest.IngestServices; +import org.sleuthkit.autopsy.ingest.IngestJobContext; +import org.sleuthkit.datamodel.AnalysisResult; +import org.sleuthkit.datamodel.Blackboard; +import org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.BlackboardAttribute; +import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_TYPE; +import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_VALUE; +import static 
org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_OTHER_CASES; +import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME; +import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.DataArtifact; +import org.sleuthkit.datamodel.Image; +import org.sleuthkit.datamodel.Score; +import org.sleuthkit.datamodel.TskCoreException; +import org.sleuthkit.datamodel.TskData; /** - * RJCTODO - * - * NOTE TO REVIEWER: - * - * This is a placeholder data artifact ingest module that counts the number of - * data artifacts it processes and posts the final count to the ingest inbox. - * The guts of the module will be supplied by a later PR. + * A data artifact ingest module that adds correlation attributes for a data + * artifact to the central repository and makes analysis results based on + * previous occurences. When the ingest job is completed, ensures the data + * source in the central repository has hash values that match those in the case + * database. 
*/ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestModule { - private final AtomicLong artifactCounter = new AtomicLong(); + private static final Logger logger = Logger.getLogger(CorrelationAttributeInstance.class.getName()); + private static final String MODULE_NAME = CentralRepoIngestModuleFactory.getModuleName(); + private static final int MAX_PREV_CASES_FOR_NOTABLE_SCORE = 10; + private static final int MAX_PREV_CASES_FOR_PREV_SEEN = 20; + private final Set corrAttrsAlreadyProcessed = new LinkedHashSet<>(); + private final boolean saveCorrelationAttrs; + private final boolean flagNotableItems; + private final boolean flagSeenDevices; + private final boolean flagUniqueArtifacts; + private Case currentCase; + private Blackboard blackboard; + private CentralRepository centralRepo; + private Content dataSource; + private long ingestJobId; + + /** + * Constructs a data artifact ingest module that adds correlation attributes + * for a data artifact to the central repository and makes analysis results + * based on previous occurences. When the ingest job is completed, ensures + * the data source in the central repository has hash values that match + * those in the case database. + * + * @param settings The ingest job settings for this module. 
+ */ + CentralRepoDataArtifactIngestModule(IngestSettings settings) { + saveCorrelationAttrs = settings.shouldCreateCorrelationProperties(); + flagNotableItems = settings.isFlagTaggedNotableItems(); + flagSeenDevices = settings.isFlagPreviousDevices(); + flagUniqueArtifacts = settings.isFlagUniqueArtifacts(); + } + + @Override + public void startUp(IngestJobContext context) throws IngestModuleException { + dataSource = context.getDataSource(); + ingestJobId = context.getJobId(); + if (!CentralRepository.isEnabled()) { + throw new IngestModuleException("Central repository required, but not enabled"); + } + try { + currentCase = Case.getCurrentCaseThrows(); + blackboard = currentCase.getSleuthkitCase().getBlackboard(); + centralRepo = CentralRepository.getInstance(); + } catch (NoCurrentCaseException ex) { + throw new IngestModuleException("Error getting current case", ex); + } catch (CentralRepoException ex) { + throw new IngestModuleException("Error accessing central repository", ex); + } + } @Override public ProcessResult process(DataArtifact artifact) { - artifactCounter.incrementAndGet(); + List corrAttrs = CorrelationAttributeUtil.makeCorrAttrsToSave(artifact); + for (CorrelationAttributeInstance corrAttr : corrAttrs) { + if (!corrAttrsAlreadyProcessed.add(corrAttr.toString())) { + continue; + } + + if (flagNotableItems || flagSeenDevices || flagUniqueArtifacts) { + makeAnalysisResults(artifact, corrAttr); + } + + if (saveCorrelationAttrs) { + try { + centralRepo.addAttributeInstanceBulk(corrAttr); + } catch (CentralRepoException ex) { + logger.log(Level.SEVERE, String.format("Error doing bulk add of correlation attribute to central repository (%s) ", corrAttr), ex); //NON-NLS + } + } + } return ProcessResult.OK; } + /** + * Makes analysis results for a data artifact based on previous occurences, + * if any, of a correlation attribute. + * + * @param artifact The data artifact. + * @param corrAttr A correlation attribute for the data artifact. 
+ */ + private void makeAnalysisResults(DataArtifact artifact, CorrelationAttributeInstance corrAttr) { + List previousOccurrences = getPreviousOccurrences(corrAttr); + if (previousOccurrences.isEmpty()) { + return; + } + + /* + * Make a previously notable analysis result for the data artifact if + * the correlation attribute has been seen in another case and marked as + * notable (TskData.FileKnown.BAD). + */ + if (flagNotableItems) { + List previousCaseNames = new ArrayList<>(); + for (CorrelationAttributeInstance occurrence : previousOccurrences) { + if (occurrence.getKnownStatus() == TskData.FileKnown.BAD) { + previousCaseNames.add(occurrence.getCorrelationCase().getDisplayName()); // Dups are removed later + } + } + if (!previousCaseNames.isEmpty()) { + makePreviousNotableAnalysisResult(artifact, previousCaseNames, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue()); + } + } + + /* + * Make a previously seen analysis result result for the data artifact + * if the correlation attribute has been seen in another case and is a + * device or communication account attribute. 
+ */ + if (flagSeenDevices && !previousOccurrences.isEmpty() + && (corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.USBID_TYPE_ID + || corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.ICCID_TYPE_ID + || corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.IMEI_TYPE_ID + || corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.IMSI_TYPE_ID + || corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.MAC_TYPE_ID + || corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.EMAIL_TYPE_ID + || corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.PHONE_TYPE_ID)) { + List previousCaseNames = new ArrayList<>(); + for (CorrelationAttributeInstance occurrence : previousOccurrences) { + previousCaseNames.add(occurrence.getCorrelationCase().getDisplayName()); // Dups are removed later + } + if (!previousCaseNames.isEmpty()) { + makePreviouslySeenAnalysisResult(artifact, previousCaseNames, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue()); + } + } + + /* + * Make a previously unseen analysis result result for the data artifact + * if the correlation attribute has not been seen in another case and is + * an app name or domain name attribute. + */ + if (flagUniqueArtifacts + && (corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.INSTALLED_PROGS_TYPE_ID + || corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.DOMAIN_TYPE_ID)) { + makeAndPostPreviouslyUnseenArtifact(artifact, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue()); + } + } + + /** + * Gets any previous occurrences of a given correlation attribute in cases + * other than the current case. + * + * @param corrAttr The correlation attribute. + * + * @return The other occurrences of the correlation attribute. 
+ */ + private List getPreviousOccurrences(CorrelationAttributeInstance corrAttr) { + List previousOccurrences = new ArrayList<>(); + try { + previousOccurrences = centralRepo.getArtifactInstancesByTypeValue(corrAttr.getCorrelationType(), corrAttr.getCorrelationValue()); + for (Iterator iterator = previousOccurrences.iterator(); iterator.hasNext();) { + CorrelationAttributeInstance instance = iterator.next(); + if (instance.getCorrelationCase().getCaseUUID().equals(corrAttr.getCorrelationCase().getCaseUUID())) { + iterator.remove(); + } + } + } catch (CorrelationAttributeNormalizationException ex) { + logger.log(Level.SEVERE, String.format("Error normalizing correlation attribute value (s)", corrAttr), ex); // NON-NLS + } catch (CentralRepoException ex) { + logger.log(Level.SEVERE, String.format("Error getting previous occurences of correlation attribute (s)", corrAttr), ex); // NON-NLS + } + return previousOccurrences; + } + + /** + * Makes a previously notable analysis result for a data artifact. + * + * @param artifact The data artifact. + * @param previousCases The names of the cases in which the artifact was + * deemed notable. + * @param corrAttrType The type of the matched correlation attribute. + * @param corrAttrValue The value of the matched correlation attribute. 
+ */ + @NbBundle.Messages({ + "CrDataArtifactIngestModule_notableSetName=Previously Tagged As Notable (Central Repository)", + "# {0} - list of cases", + "CrDataArtifactIngestModule_notableJustification=Previously marked as notable in cases {0}" + }) + private void makePreviousNotableAnalysisResult(DataArtifact artifact, List previousCases, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue) { + String prevCases = previousCases.stream().distinct().collect(Collectors.joining(",")); + String justification = Bundle.CrDataArtifactIngestModule_notableJustification(prevCases); + Collection attributes = Arrays.asList(new BlackboardAttribute(TSK_SET_NAME, MODULE_NAME, Bundle.CrDataArtifactIngestModule_notableSetName()), + new BlackboardAttribute(TSK_CORRELATION_TYPE, MODULE_NAME, corrAttrType.getDisplayName()), + new BlackboardAttribute(TSK_CORRELATION_VALUE, MODULE_NAME, corrAttrValue), + new BlackboardAttribute(TSK_OTHER_CASES, MODULE_NAME, prevCases)); + makeAndPostAnalysisResult(artifact, BlackboardArtifact.Type.TSK_PREVIOUSLY_NOTABLE, attributes, "", Score.SCORE_NOTABLE, justification); + } + + /** + * Makes a previously seen analysis result for a data artifact, unless the + * artifact is too common. + * + * @param artifact The data artifact. + * @param previousCases The names of the cases in which the artifact was + * previously seen. + * @param corrAttrType The type of the matched correlation attribute. + * @param corrAttrValue The value of the matched correlation attribute. 
+ */ + @NbBundle.Messages({ + "CrDataArtifactIngestModule_prevSeenSetName=Previously Seen (Central Repository)", + "# {0} - list of cases", + "CrDataArtifactIngestModule_prevSeenJustification=Previously seen in cases {0}" + }) + private void makePreviouslySeenAnalysisResult(DataArtifact artifact, List previousCases, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue) { + Score score; + int numCases = previousCases.size(); + if (numCases <= MAX_PREV_CASES_FOR_NOTABLE_SCORE) { + score = Score.SCORE_LIKELY_NOTABLE; + } else if (numCases > MAX_PREV_CASES_FOR_NOTABLE_SCORE && numCases <= MAX_PREV_CASES_FOR_PREV_SEEN) { + score = Score.SCORE_NONE; + } else { + /* + * Don't make the analysis result, the artifact is too common. + */ + return; + } + + String prevCases = previousCases.stream().distinct().collect(Collectors.joining(",")); + String justification = Bundle.CrDataArtifactIngestModule_prevSeenJustification(prevCases); + Collection analysisResultAttributes = Arrays.asList( + new BlackboardAttribute(TSK_SET_NAME, MODULE_NAME, Bundle.CrDataArtifactIngestModule_prevSeenSetName()), + new BlackboardAttribute(TSK_CORRELATION_TYPE, MODULE_NAME, corrAttrType.getDisplayName()), + new BlackboardAttribute(TSK_CORRELATION_VALUE, MODULE_NAME, corrAttrValue), + new BlackboardAttribute(TSK_OTHER_CASES, MODULE_NAME, prevCases)); + makeAndPostAnalysisResult(artifact, BlackboardArtifact.Type.TSK_PREVIOUSLY_SEEN, analysisResultAttributes, "", score, justification); + } + + /** + * Makes a previously unseen analysis result for a data artifact. + * + * @param artifact The data artifact. + * @param corrAttrType The type of the new correlation attribute. + * @param corrAttrValue The value of the new correlation attribute. 
+ */ + @NbBundle.Messages({ + "CrDataArtifactIngestModule_prevUnseenJustification=Previously seen in zero cases" + }) + private void makeAndPostPreviouslyUnseenArtifact(DataArtifact artifact, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue) { + Collection attributesForNewArtifact = Arrays.asList(new BlackboardAttribute( + TSK_CORRELATION_TYPE, MODULE_NAME, + corrAttrType.getDisplayName()), + new BlackboardAttribute( + TSK_CORRELATION_VALUE, MODULE_NAME, + corrAttrValue)); + makeAndPostAnalysisResult(artifact, BlackboardArtifact.Type.TSK_PREVIOUSLY_UNSEEN, attributesForNewArtifact, "", Score.SCORE_LIKELY_NOTABLE, Bundle.CrDataArtifactIngestModule_prevUnseenJustification); + } + + /** + * Makes a new analysis result of a given type for a data artifact and posts + * it to the blackboard. + * + * @param artifact The data artifact. + * @param analysisResultType The type of analysis result to make. + * @param analysisResultAttrs The attributes of the new analysis result. + * @param configuration The configuration for the new analysis result. + * @param score The score for the new analysis result. + * @param justification The justification for the new analysis result. 
+ */ + private void makeAndPostAnalysisResult(DataArtifact artifact, BlackboardArtifact.Type analysisResultType, Collection analysisResultAttrs, String configuration, Score score, String justification) { + try { + if (!blackboard.artifactExists(artifact, analysisResultType, analysisResultAttrs)) { + AnalysisResult analysisResult = artifact.newAnalysisResult(analysisResultType, score, null, configuration, justification, analysisResultAttrs).getAnalysisResult(); + try { + blackboard.postArtifact(analysisResult, CentralRepoIngestModuleFactory.getModuleName(), ingestJobId); + } catch (Blackboard.BlackboardException ex) { + logger.log(Level.SEVERE, String.format("Error posting analysis result to blackboard (*s)", analysisResult), ex); //NON-NLS + } + } + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, "Error creating analysis result", ex); // NON-NLS + } + } + @Override public void shutDown() { - IngestServices.getInstance().postMessage(IngestMessage.createMessage( - IngestMessage.MessageType.INFO, - CentralRepoIngestModuleFactory.getModuleName(), - String.format("%d data artifacts processed", artifactCounter.get()))); //NON-NLS + try { + centralRepo.commitAttributeInstancesBulk(); + } catch (CentralRepoException ex) { + logger.log(Level.SEVERE, "Error doing final bulk commit of correlation attributes", ex); // NON-NLS + } + /* + * Data artifact ingest modules are shut down at the end of the ingest + * job. Now that the job is complete ensures the data source in the + * central repository has hash values that match those in the case + * database. + */ + syncDataSourceHashes(); + } + + /** + * Ensures the data source in the central repository has hash values that + * match those in the case database. 
+ */ + private void syncDataSourceHashes() { + if (!(dataSource instanceof Image)) { + return; + } + + try { + CorrelationCase correlationCase = centralRepo.getCase(currentCase); + if (correlationCase == null) { + correlationCase = centralRepo.newCase(currentCase); + } + + CorrelationDataSource correlationDataSource = centralRepo.getDataSource(correlationCase, dataSource.getId()); + if (correlationDataSource == null) { + correlationDataSource = CorrelationDataSource.fromTSKDataSource(correlationCase, dataSource); + } + + Image image = (Image) dataSource; + String imageMd5Hash = image.getMd5(); + if (imageMd5Hash == null) { + imageMd5Hash = ""; + } + String crMd5Hash = correlationDataSource.getMd5(); + if (StringUtils.equals(imageMd5Hash, crMd5Hash) == false) { + correlationDataSource.setMd5(imageMd5Hash); + } + + String imageSha1Hash = image.getSha1(); + if (imageSha1Hash == null) { + imageSha1Hash = ""; + } + String crSha1Hash = correlationDataSource.getSha1(); + if (StringUtils.equals(imageSha1Hash, crSha1Hash) == false) { + correlationDataSource.setSha1(imageSha1Hash); + } + + String imageSha256Hash = image.getSha256(); + if (imageSha256Hash == null) { + imageSha256Hash = ""; + } + String crSha256Hash = correlationDataSource.getSha256(); + if (StringUtils.equals(imageSha256Hash, crSha256Hash) == false) { + correlationDataSource.setSha256(imageSha256Hash); + } + + } catch (CentralRepoException ex) { + logger.log(Level.SEVERE, String.format("Error fetching data from the central repository for data source '%s' (obj_id=%d)", dataSource.getName(), dataSource.getId()), ex); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Error fetching data from the case database for data source '%s' (obj_id=%d)", dataSource.getName(), dataSource.getId()), ex); + } } } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java 
b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java similarity index 90% rename from Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java rename to Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java index 26d262b4c6..497a1fcb7f 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java @@ -1,7 +1,7 @@ /* * Central Repository * - * Copyright 2011-2021 Basis Technology Corp. + * Copyright 2018-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -66,7 +66,7 @@ import org.sleuthkit.datamodel.Score; */ @Messages({"CentralRepoIngestModule.prevTaggedSet.text=Previously Tagged As Notable (Central Repository)", "CentralRepoIngestModule.prevCaseComment.text=Previous Case: "}) -final class CentralRepoIngestModule implements FileIngestModule { +final class CentralRepoFileIngestModule implements FileIngestModule { private static final String MODULE_NAME = CentralRepoIngestModuleFactory.getModuleName(); static final boolean DEFAULT_FLAG_TAGGED_NOTABLE_ITEMS = false; @@ -74,7 +74,7 @@ final class CentralRepoIngestModule implements FileIngestModule { static final boolean DEFAULT_FLAG_UNIQUE_DEVICES = false; static final boolean DEFAULT_CREATE_CR_PROPERTIES = true; - private final static Logger logger = Logger.getLogger(CentralRepoIngestModule.class.getName()); + private final static Logger logger = Logger.getLogger(CentralRepoFileIngestModule.class.getName()); private final IngestServices services = IngestServices.getInstance(); private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter(); private static final IngestModuleReferenceCounter warningMsgRefCounter = new IngestModuleReferenceCounter(); 
@@ -83,21 +83,17 @@ final class CentralRepoIngestModule implements FileIngestModule { private CorrelationDataSource eamDataSource; private CorrelationAttributeInstance.Type filesType; private final boolean flagTaggedNotableItems; - private final boolean flagPreviouslySeenDevices; private Blackboard blackboard; private final boolean createCorrelationProperties; - private final boolean flagUniqueArtifacts; /** * Instantiate the Central Repository ingest module. * * @param settings The ingest settings for the module instance. */ - CentralRepoIngestModule(IngestSettings settings) { + CentralRepoFileIngestModule(IngestSettings settings) { flagTaggedNotableItems = settings.isFlagTaggedNotableItems(); - flagPreviouslySeenDevices = settings.isFlagPreviousDevices(); createCorrelationProperties = settings.shouldCreateCorrelationProperties(); - flagUniqueArtifacts = settings.isFlagUniqueArtifacts(); } @Override @@ -231,33 +227,6 @@ final class CentralRepoIngestModule implements FileIngestModule { public void startUp(IngestJobContext context) throws IngestModuleException { IngestEventsListener.incrementCorrelationEngineModuleCount(); - /* - * Tell the IngestEventsListener to flag notable items based on the - * current module's configuration. This is a work around for the lack of - * an artifacts pipeline. Note that this can be changed by another - * module instance. All modules are affected by the value. While not - * ideal, this will be good enough until a better solution can be - * posited. - * - * Note: Flagging cannot be disabled if any other instances of the - * Central Repository module are running. This restriction is to prevent - * missing results in the case where the first module is flagging - * notable items, and the proceeding module (with flagging disabled) - * causes the first to stop flagging. 
- */ - if (IngestEventsListener.getCeModuleInstanceCount() == 1 || !IngestEventsListener.isFlagNotableItems()) { - IngestEventsListener.setFlagNotableItems(flagTaggedNotableItems); - } - if (IngestEventsListener.getCeModuleInstanceCount() == 1 || !IngestEventsListener.isFlagSeenDevices()) { - IngestEventsListener.setFlagSeenDevices(flagPreviouslySeenDevices); - } - if (IngestEventsListener.getCeModuleInstanceCount() == 1 || !IngestEventsListener.shouldCreateCrProperties()) { - IngestEventsListener.setCreateCrProperties(createCorrelationProperties); - } - if (IngestEventsListener.getCeModuleInstanceCount() == 1 || !IngestEventsListener.isFlagUniqueArtifacts()) { - IngestEventsListener.setFlagUniqueArtifacts(flagUniqueArtifacts); - } - if (CentralRepository.isEnabled() == false) { /* * Not throwing the customary exception for now. This is a diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModuleFactory.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModuleFactory.java index ddece9731c..9c52e2a2e0 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModuleFactory.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModuleFactory.java @@ -72,14 +72,14 @@ public class CentralRepoIngestModuleFactory extends IngestModuleFactoryAdapter { @Override public FileIngestModule createFileIngestModule(IngestModuleIngestJobSettings settings) { if (settings instanceof IngestSettings) { - return new CentralRepoIngestModule((IngestSettings) settings); + return new CentralRepoFileIngestModule((IngestSettings) settings); } /* * Earlier versions of the modules had no ingest job settings. Create a * module with the default settings. 
*/ if (settings instanceof NoIngestModuleIngestJobSettings) { - return new CentralRepoIngestModule((IngestSettings) getDefaultIngestJobSettings()); + return new CentralRepoFileIngestModule((IngestSettings) getDefaultIngestJobSettings()); } throw new IllegalArgumentException("Expected settings argument to be an instance of IngestSettings"); } @@ -128,7 +128,7 @@ public class CentralRepoIngestModuleFactory extends IngestModuleFactoryAdapter { @Override public DataArtifactIngestModule createDataArtifactIngestModule(IngestModuleIngestJobSettings settings) { - return new CentralRepoDataArtifactIngestModule(); + return new CentralRepoDataArtifactIngestModule((IngestSettings) settings); } } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/IngestSettings.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/IngestSettings.java index e55f09882e..a977da13f2 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/IngestSettings.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/IngestSettings.java @@ -36,10 +36,10 @@ final class IngestSettings implements IngestModuleIngestJobSettings { * Instantiate the ingest job settings with default values. 
*/ IngestSettings() { - this.flagTaggedNotableItems = CentralRepoIngestModule.DEFAULT_FLAG_TAGGED_NOTABLE_ITEMS; - this.flagPreviousDevices = CentralRepoIngestModule.DEFAULT_FLAG_PREVIOUS_DEVICES; - this.createCorrelationProperties = CentralRepoIngestModule.DEFAULT_CREATE_CR_PROPERTIES; - this.flagUniqueArtifacts = CentralRepoIngestModule.DEFAULT_FLAG_UNIQUE_DEVICES; + this.flagTaggedNotableItems = CentralRepoFileIngestModule.DEFAULT_FLAG_TAGGED_NOTABLE_ITEMS; + this.flagPreviousDevices = CentralRepoFileIngestModule.DEFAULT_FLAG_PREVIOUS_DEVICES; + this.createCorrelationProperties = CentralRepoFileIngestModule.DEFAULT_CREATE_CR_PROPERTIES; + this.flagUniqueArtifacts = CentralRepoFileIngestModule.DEFAULT_FLAG_UNIQUE_DEVICES; } /** From f2f3c98f51e2db38ccc29ca2959f93301a0cf0c9 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Mon, 1 Nov 2021 16:10:23 -0400 Subject: [PATCH 002/142] 7895 CR data artifact ingest module --- .../datamodel/Bundle.properties-MERGED | 1 + .../datamodel/CentralRepositoryService.java | 33 ++- .../eventlisteners/Bundle.properties-MERGED | 7 - .../eventlisteners/CaseEventListener.java | 258 +++++++++++------- .../ingestmodule/Bundle.properties-MERGED | 7 + .../CentralRepoDataArtifactIngestModule.java | 21 +- .../CentralRepoFileIngestModule.java | 4 - 7 files changed, 203 insertions(+), 128 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/Bundle.properties-MERGED index 724758847b..a1f1432a70 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/Bundle.properties-MERGED @@ -12,6 +12,7 @@ CentralRepoDbChoice.PostgreSQL.Text=Custom PostgreSQL CentralRepoDbChoice.PostgreSQL_Multiuser.Text=PostgreSQL using multi-user settings CentralRepoDbChoice.Sqlite.Text=SQLite CentralRepoDbManager.connectionErrorMsg.text=Failed to 
connect to central repository database. +CentralRepositoryService.progressMsg.startingListener=Starting events listener... CentralRepositoryService.progressMsg.updatingSchema=Checking for schema updates... CentralRepositoryService.progressMsg.waitingForListeners=Finishing adding data to central repository database.... CentralRepositoryService.serviceName=Central Repository Service diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepositoryService.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepositoryService.java index 649e9e5130..e2f20524e3 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepositoryService.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepositoryService.java @@ -1,7 +1,7 @@ /* * Central Repository * - * Copyright 2018-2020 Basis Technology Corp. + * Copyright 2018-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -35,7 +35,7 @@ import org.sleuthkit.datamodel.TskCoreException; public class CentralRepositoryService implements AutopsyService { private CaseEventListener caseEventListener = new CaseEventListener(); - + @Override @NbBundle.Messages({ "CentralRepositoryService.serviceName=Central Repository Service" @@ -45,7 +45,8 @@ public class CentralRepositoryService implements AutopsyService { } @NbBundle.Messages({ - "CentralRepositoryService.progressMsg.updatingSchema=Checking for schema updates..." + "CentralRepositoryService.progressMsg.updatingSchema=Checking for schema updates...", + "CentralRepositoryService.progressMsg.startingListener=Starting events listener..." 
}) @Override public void openCaseResources(CaseContext context) throws AutopsyServiceException { @@ -56,17 +57,20 @@ public class CentralRepositoryService implements AutopsyService { ProgressIndicator progress = context.getProgressIndicator(); progress.progress(Bundle.CentralRepositoryService_progressMsg_updatingSchema()); updateSchema(); - if (context.cancelRequested()) { return; } dataUpgradeForVersion1dot2(context.getCase()); - + if (context.cancelRequested()) { + return; + } + + progress.progress(Bundle.CentralRepositoryService_progressMsg_startingListener()); caseEventListener = new CaseEventListener(); - caseEventListener.installListeners(); + caseEventListener.startUp(); } - + @NbBundle.Messages({ "CentralRepositoryService.progressMsg.waitingForListeners=Finishing adding data to central repository database...." }) @@ -74,18 +78,16 @@ public class CentralRepositoryService implements AutopsyService { public void closeCaseResources(CaseContext context) throws AutopsyServiceException { ProgressIndicator progress = context.getProgressIndicator(); progress.progress(Bundle.CentralRepositoryService_progressMsg_waitingForListeners()); - if (caseEventListener != null) { - caseEventListener.uninstallListeners(); caseEventListener.shutdown(); } - } /** - * Updates the central repository schema to the latest version. + * Updates the central repository database schema to the latest version. * - * @throws AutopsyServiceException + * @throws AutopsyServiceException The exception is thrown if there is an + * error updating the database schema. */ private void updateSchema() throws AutopsyServiceException { try { @@ -97,10 +99,11 @@ public class CentralRepositoryService implements AutopsyService { /** * Adds missing data source object IDs from data sources in this case to the - * corresponding records in the central repository. This is a data update to - * go with the v1.2 schema update. + * corresponding records in the central repository database. 
This is a data + * update to go with the v1.2 schema update. * - * @throws AutopsyServiceException + * @throws AutopsyServiceException The exception is thrown if there is an + * error updating the database. */ private void dataUpgradeForVersion1dot2(Case currentCase) throws AutopsyServiceException { try { diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/Bundle.properties-MERGED index d71782c0ee..3b1dde1896 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/Bundle.properties-MERGED @@ -8,11 +8,4 @@ CentralRepositoryNotificationDialog.bulletThree=Create personas that group accou CentralRepositoryNotificationDialog.bulletTwo=Identify where an item was previously seen CentralRepositoryNotificationDialog.finalRemarks=To limit what is stored, use the Central Repository options panel. CentralRepositoryNotificationDialog.header=Autopsy stores data about each case in its Central Repository. 
-IngestEventsListener.ingestmodule.name=Central Repository -IngestEventsListener.prevCaseComment.text=Previous Case: -# {0} - typeName -# {1} - count -IngestEventsListener.prevCount.text=Number of previous {0}: {1} -IngestEventsListener.prevExists.text=Previously Seen Devices (Central Repository) -IngestEventsListener.prevTaggedSet.text=Previously Tagged As Notable (Central Repository) Installer.centralRepoUpgradeFailed.title=Central repository disabled diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java index 53fb9454af..92cfcde595 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java @@ -30,6 +30,7 @@ import java.util.Optional; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.logging.Level; import java.util.stream.Collectors; import org.apache.commons.lang.StringUtils; @@ -64,7 +65,6 @@ import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException; import org.sleuthkit.datamodel.Tag; import org.sleuthkit.autopsy.events.AutopsyEvent; -import org.sleuthkit.autopsy.ingest.IngestManager; import org.sleuthkit.datamodel.AnalysisResult; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardAttribute; @@ -79,16 +79,14 @@ import org.sleuthkit.datamodel.Score; import org.sleuthkit.datamodel.SleuthkitCase; /** - * Listen for case events and update entries in the Central Repository database - * accordingly + * An Autopsy events listener for case events relevant to the central + * repository. 
*/ @Messages({"caseeventlistener.evidencetag=Evidence"}) public final class CaseEventListener implements PropertyChangeListener { private static final Logger LOGGER = Logger.getLogger(CaseEventListener.class.getName()); - private final ExecutorService jobProcessingExecutor; - private static final String CASE_EVENT_THREAD_NAME = "Case-Event-Listener-%d"; - + private static final String CASE_EVENT_THREAD_NAME = "CR-Case-Event-Listener-%d"; private static final Set CASE_EVENTS_OF_INTEREST = EnumSet.of( Case.Events.CONTENT_TAG_ADDED, Case.Events.CONTENT_TAG_DELETED, Case.Events.BLACKBOARD_ARTIFACT_TAG_DELETED, Case.Events.BLACKBOARD_ARTIFACT_TAG_ADDED, @@ -98,12 +96,72 @@ public final class CaseEventListener implements PropertyChangeListener { Case.Events.CURRENT_CASE, Case.Events.DATA_SOURCE_NAME_CHANGED, Case.Events.OS_ACCT_INSTANCES_ADDED); + private static final int MAX_PREV_CASES_FOR_NOTABLE_SCORE = 10; + private static final int MAX_PREV_CASES_FOR_PREV_SEEN = 20; + private static final AtomicBoolean createOSAcctCorrAttrs = new AtomicBoolean(); + private static final AtomicBoolean flagPreviouslySeenOSAccts = new AtomicBoolean(); + private final ExecutorService jobProcessingExecutor; + /** + * Set whether or not central repository case event listeners should create + * correlation attributes for new OS Accounts. + * + * @param flag True or false. + */ + public static void setCreateOsAcctCorrAttrs(boolean flag) { + createOSAcctCorrAttrs.set(flag); + } + + /** + * Gets whether or not central repository case event listeners should create + * correlation attributes for new OS Accounts. + * + * @return flag True or false. + */ + public static boolean createOsAcctCorrAttrs() { + return createOSAcctCorrAttrs.get(); + } + + /** + * Sets whether or not central repository case event listeners should create + * previously seen analyis results for OS accounts. + * + * @param flag True or false. 
+ */ + public static void setFlagPrevSeenOsAccts(boolean flag) { + flagPreviouslySeenOSAccts.set(flag); + } + + /** + * Gets whether or not central repository case event listeners should create + * previously seen analyis results for OS accounts. + * + * @return flag True or false. + */ + public static boolean flagPrevSeenOsAccts() { + return flagPreviouslySeenOSAccts.get(); + } + + /** + * Contructs an Autopsy events listener for case events relevant to the + * central repository. + */ public CaseEventListener() { jobProcessingExecutor = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setNameFormat(CASE_EVENT_THREAD_NAME).build()); } + /** + * Starts up the listener. + */ + public void startUp() { + Case.addEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, this); + } + + /** + * Shuts down the listener. + */ public void shutdown() { + Case.removeEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, this); ThreadUtils.shutDownTaskExecutor(jobProcessingExecutor); } @@ -113,92 +171,75 @@ public final class CaseEventListener implements PropertyChangeListener { return; } - CentralRepository dbManager; - try { - dbManager = CentralRepository.getInstance(); - } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, "Failed to get instance of db manager.", ex); + if (!CentralRepository.isEnabled()) { return; } - // If any changes are made to which event types are handled the change - // must also be made to CASE_EVENTS_OF_INTEREST. + CentralRepository centralRepo; + try { + centralRepo = CentralRepository.getInstance(); + } catch (CentralRepoException ex) { + LOGGER.log(Level.SEVERE, "Failed to access central repository", ex); + return; + } + + /* + * IMPORTANT: If any changes are made to which event types are handled, + * the change must also be made to the contents of the + * CASE_EVENTS_OF_INTEREST set. 
+ */ switch (Case.Events.valueOf(evt.getPropertyName())) { case CONTENT_TAG_ADDED: - case CONTENT_TAG_DELETED: { - jobProcessingExecutor.submit(new ContentTagTask(dbManager, evt)); - } - break; - + case CONTENT_TAG_DELETED: + jobProcessingExecutor.submit(new ContentTagTask(centralRepo, evt)); + break; case BLACKBOARD_ARTIFACT_TAG_DELETED: - case BLACKBOARD_ARTIFACT_TAG_ADDED: { - jobProcessingExecutor.submit(new BlackboardTagTask(dbManager, evt)); - } - break; - - case DATA_SOURCE_ADDED: { - jobProcessingExecutor.submit(new DataSourceAddedTask(dbManager, evt)); - } - break; - case TAG_DEFINITION_CHANGED: { + case BLACKBOARD_ARTIFACT_TAG_ADDED: + jobProcessingExecutor.submit(new ArtifactTagTask(centralRepo, evt)); + break; + case DATA_SOURCE_ADDED: + jobProcessingExecutor.submit(new DataSourceAddedTask(centralRepo, evt)); + break; + case TAG_DEFINITION_CHANGED: jobProcessingExecutor.submit(new TagDefinitionChangeTask(evt)); - } - break; - case CURRENT_CASE: { - jobProcessingExecutor.submit(new CurrentCaseTask(dbManager, evt)); - } - break; - case DATA_SOURCE_NAME_CHANGED: { - jobProcessingExecutor.submit(new DataSourceNameChangedTask(dbManager, evt)); - } - break; + break; + case CURRENT_CASE: + jobProcessingExecutor.submit(new CurrentCaseTask(centralRepo, evt)); + break; + case DATA_SOURCE_NAME_CHANGED: + jobProcessingExecutor.submit(new DataSourceNameChangedTask(centralRepo, evt)); + break; case OS_ACCT_INSTANCES_ADDED: { - if (((AutopsyEvent) evt).getSourceType() == AutopsyEvent.SourceType.LOCAL) { - jobProcessingExecutor.submit(new OsAccountInstancesAddedTask(dbManager, evt)); - } + jobProcessingExecutor.submit(new OsAccountInstancesAddedTask(centralRepo, evt)); } break; } } - /* - * Add all of our Case Event Listeners to the case. + /** + * Determines whether or not a tag has notable status. + * + * @param tag The tag. + * + * @return True or false. 
*/ - public void installListeners() { - Case.addEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, this); - } - - /* - * Remove all of our Case Event Listeners from the case. - */ - public void uninstallListeners() { - Case.removeEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, this); + private static boolean isNotableTag(Tag tag) { + return (tag != null && isNotableTagDefinition(tag.getName())); } /** - * Returns true if the tag has a notable status. + * Determines whether or not a tag definition has notable status. * - * @param t The tag to use in determination. + * @param tagDef The tag definition. * - * @return Whether or not it is a notable tag. + * @return True or false. */ - private static boolean isNotableTag(Tag t) { - return (t != null && isNotableTagName(t.getName())); + private static boolean isNotableTagDefinition(TagName tagDef) { + return (tagDef != null && TagsManager.getNotableTagDisplayNames().contains(tagDef.getDisplayName())); } /** - * Returns true if the tag name has a notable status. - * - * @param t The tag name to use in determination. - * - * @return Whether or not it is a notable tag name. - */ - private static boolean isNotableTagName(TagName t) { - return (t != null && TagsManager.getNotableTagDisplayNames().contains(t.getDisplayName())); - } - - /** - * Searches a list of tags for a tag with a notable status. + * Searches a list of tags for a tag with notable status. * * @param tags The tags to search. * @@ -208,7 +249,6 @@ public final class CaseEventListener implements PropertyChangeListener { if (tags == null) { return false; } - return tags.stream() .filter(CaseEventListener::isNotableTag) .findFirst() @@ -216,28 +256,32 @@ public final class CaseEventListener implements PropertyChangeListener { } /** - * Sets the known status of a blackboard artifact in the central repository. + * Sets the notable (known) status of a central repository correlation + * attribute corresponding to an artifact. * - * @param dbManager The central repo database. 
- * @param bbArtifact The blackboard artifact to set known status. - * @param knownStatus The new known status. + * @param centralRepo The central repository. + * @param artifact The artifact. + * @param knownStatus The new notable status. */ - private static void setArtifactKnownStatus(CentralRepository dbManager, BlackboardArtifact bbArtifact, TskData.FileKnown knownStatus) { - List convertedArtifacts = new ArrayList<>(); - if (bbArtifact instanceof DataArtifact) { - convertedArtifacts.addAll(CorrelationAttributeUtil.makeCorrAttrsForSearch((DataArtifact) bbArtifact)); - } else if (bbArtifact instanceof AnalysisResult) { - convertedArtifacts.addAll(CorrelationAttributeUtil.makeCorrAttrsForSearch((AnalysisResult) bbArtifact)); + private static void setArtifactKnownStatus(CentralRepository centralRepo, BlackboardArtifact artifact, TskData.FileKnown knownStatus) { + List corrAttrInstances = new ArrayList<>(); + if (artifact instanceof DataArtifact) { + corrAttrInstances.addAll(CorrelationAttributeUtil.makeCorrAttrsForSearch((DataArtifact) artifact)); + } else if (artifact instanceof AnalysisResult) { + corrAttrInstances.addAll(CorrelationAttributeUtil.makeCorrAttrsForSearch((AnalysisResult) artifact)); } - for (CorrelationAttributeInstance eamArtifact : convertedArtifacts) { + for (CorrelationAttributeInstance corrAttrInstance : corrAttrInstances) { try { - dbManager.setAttributeInstanceKnownStatus(eamArtifact, knownStatus); + centralRepo.setAttributeInstanceKnownStatus(corrAttrInstance, knownStatus); } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, "Error connecting to Central Repository database while setting artifact known status.", ex); //NON-NLS + LOGGER.log(Level.SEVERE, String.format("Error setting correlation attribute instance known status", corrAttrInstance), ex); //NON-NLS } } } + /** + * A task RJCTODO + */ private final class ContentTagTask implements Runnable { private final CentralRepository dbManager; @@ -359,12 +403,15 @@ public final 
class CaseEventListener implements PropertyChangeListener { } } - private final class BlackboardTagTask implements Runnable { + /** + * A task RJCTODO + */ + private final class ArtifactTagTask implements Runnable { private final CentralRepository dbManager; private final PropertyChangeEvent event; - private BlackboardTagTask(CentralRepository db, PropertyChangeEvent evt) { + private ArtifactTagTask(CentralRepository db, PropertyChangeEvent evt) { dbManager = db; event = evt; } @@ -478,6 +525,9 @@ public final class CaseEventListener implements PropertyChangeListener { } + /** + * A task RJCTODO + */ private final class TagDefinitionChangeTask implements Runnable { private final PropertyChangeEvent event; @@ -589,6 +639,9 @@ public final class CaseEventListener implements PropertyChangeListener { } //TAG_STATUS_CHANGED } + /** + * A task RJCTODO + */ private final class DataSourceAddedTask implements Runnable { private final CentralRepository dbManager; @@ -626,6 +679,9 @@ public final class CaseEventListener implements PropertyChangeListener { } // DATA_SOURCE_ADDED } + /** + * A task RJCTODO + */ private final class CurrentCaseTask implements Runnable { private final CentralRepository dbManager; @@ -662,13 +718,15 @@ public final class CaseEventListener implements PropertyChangeListener { } // CURRENT_CASE } - @NbBundle.Messages({"CaseEventsListener.module.name=Central Repository", - "CaseEventsListener.prevCaseComment.text=Users seen in previous cases", - "CaseEventsListener.prevExists.text=Previously Seen Users (Central Repository)"}) /** + * A task RJCTODO + * * Add OsAccount Instance to CR and find interesting items based on the * OsAccount */ + @NbBundle.Messages({"CaseEventsListener.module.name=Central Repository", + "CaseEventsListener.prevCaseComment.text=Users seen in previous cases", + "CaseEventsListener.prevExists.text=Previously Seen Users (Central Repository)"}) private final class OsAccountInstancesAddedTask implements Runnable { private final 
CentralRepository dbManager; @@ -682,12 +740,9 @@ public final class CaseEventListener implements PropertyChangeListener { @Override public void run() { - //Nothing to do here if the central repo is not enabled or if ingest is running but is set to not save data/make artifacts - if (!CentralRepository.isEnabled() - || (IngestManager.getInstance().isIngestRunning() && !(IngestEventsListener.isFlagSeenDevices() || IngestEventsListener.shouldCreateCrProperties()))) { + if (!createOsAcctCorrAttrs() && !flagPrevSeenOsAccts()) { return; } - final OsAcctInstancesAddedEvent osAcctInstancesAddedEvent = (OsAcctInstancesAddedEvent) event; List addedOsAccountNew = osAcctInstancesAddedEvent.getOsAccountInstances(); for (OsAccountInstance osAccountInstance : addedOsAccountNew) { @@ -700,16 +755,13 @@ public final class CaseEventListener implements PropertyChangeListener { Optional accountAddr = osAccount.getAddr(); try { - // Save to the database if requested - if (IngestEventsListener.shouldCreateCrProperties()) { + if (createOsAcctCorrAttrs()) { for (CorrelationAttributeInstance correlationAttributeInstance : correlationAttributeInstances) { dbManager.addArtifactInstance(correlationAttributeInstance); } } - // Look up and create artifacts for previously seen accounts if requested - if (IngestEventsListener.isFlagSeenDevices()) { - + if (flagPrevSeenOsAccts()) { CorrelationAttributeInstance instanceWithTypeValue = null; for (CorrelationAttributeInstance instance : correlationAttributeInstances) { if (instance.getCorrelationType().getId() == CorrelationAttributeInstance.OSACCOUNT_TYPE_ID) { @@ -732,9 +784,10 @@ public final class CaseEventListener implements PropertyChangeListener { // calculate score Score score; int numCases = caseDisplayNames.size(); - if (numCases <= IngestEventsListener.MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE) { + // RJCTODO: Centralize constants (consider) + if (numCases <= MAX_PREV_CASES_FOR_NOTABLE_SCORE) { score = Score.SCORE_LIKELY_NOTABLE; - } 
else if (numCases > IngestEventsListener.MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE && numCases <= IngestEventsListener.MAX_NUM_PREVIOUS_CASES_FOR_PREV_SEEN_ARTIFACT_CREATION) { + } else if (numCases > MAX_PREV_CASES_FOR_NOTABLE_SCORE && numCases <= MAX_PREV_CASES_FOR_PREV_SEEN) { score = Score.SCORE_NONE; } else { // don't make an Analysis Result, the artifact is too common. @@ -769,7 +822,6 @@ public final class CaseEventListener implements PropertyChangeListener { } } } - } catch (CorrelationAttributeNormalizationException ex) { LOGGER.log(Level.SEVERE, "Exception with Correlation Attribute Normalization.", ex); //NON-NLS } catch (CentralRepoException ex) { @@ -782,6 +834,9 @@ public final class CaseEventListener implements PropertyChangeListener { } } + /** + * RJCTODO + */ private final class DataSourceNameChangedTask implements Runnable { private final CentralRepository dbManager; @@ -815,6 +870,7 @@ public final class CaseEventListener implements PropertyChangeListener { LOGGER.log(Level.SEVERE, "No open case", ex); } } - } // DATA_SOURCE_NAME_CHANGED + } } + } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/Bundle.properties-MERGED index d15eb90cb2..179469aae4 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/Bundle.properties-MERGED @@ -9,6 +9,13 @@ CentralRepoIngestModule_notable_message_header=A file in this data source CentralRepoIngestModule_postToBB_knownBadMsg=Notable: {0} CentralRepoIngestModuleFactory.ingestmodule.desc=Saves properties to the central repository for later correlation CentralRepoIngestModuleFactory.ingestmodule.name=Central Repository +# {0} - list of cases +CrDataArtifactIngestModule_notableJustification=Previously marked as notable in cases {0} +CrDataArtifactIngestModule_notableSetName=Previously 
Tagged As Notable (Central Repository) +# {0} - list of cases +CrDataArtifactIngestModule_prevSeenJustification=Previously seen in cases {0} +CrDataArtifactIngestModule_prevSeenSetName=Previously Seen (Central Repository) +CrDataArtifactIngestModule_prevUnseenJustification=Previously seen in zero cases IngestSettingsPanel.ingestSettingsLabel.text=Ingest Settings IngestSettingsPanel.flagTaggedNotableItemsCheckbox.text=Flag items previously tagged as notable IngestSettingsPanel.flagPreviouslySeenDevicesCheckbox.text=Flag devices and users previously seen in other cases diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java index cd3aad8298..f90a74dd83 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java @@ -38,6 +38,7 @@ import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNor import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeUtil; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource; +import org.sleuthkit.autopsy.centralrepository.eventlisteners.CaseEventListener; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.DataArtifactIngestModule; import org.sleuthkit.autopsy.ingest.IngestJobContext; @@ -112,6 +113,15 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo } catch (CentralRepoException ex) { throw new IngestModuleException("Error accessing central repository", ex); } + /* + * Pass the relevant ingest job settings on to the case events listener + * for the central repository. 
Note that the listener's dependency on + * these settings currently means that it can only react to new OS + * account instances events when an ingest job with this module enabled + * is running. + */ + CaseEventListener.setCreateOsAcctCorrAttrs(saveCorrelationAttrs); + CaseEventListener.setFlagPrevSeenOsAccts(flagSeenDevices); } @Override @@ -307,7 +317,7 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo new BlackboardAttribute( TSK_CORRELATION_VALUE, MODULE_NAME, corrAttrValue)); - makeAndPostAnalysisResult(artifact, BlackboardArtifact.Type.TSK_PREVIOUSLY_UNSEEN, attributesForNewArtifact, "", Score.SCORE_LIKELY_NOTABLE, Bundle.CrDataArtifactIngestModule_prevUnseenJustification); + makeAndPostAnalysisResult(artifact, BlackboardArtifact.Type.TSK_PREVIOUSLY_UNSEEN, attributesForNewArtifact, "", Score.SCORE_LIKELY_NOTABLE, Bundle.CrDataArtifactIngestModule_prevUnseenJustification()); } /** @@ -350,6 +360,15 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo * job has hash values that match those in the case database. */ syncDataSourceHashes(); + /* + * Clear the relevant ingest job settings that were passed on to the + * case events listener for the central repository. Note that the + * listener's dependency on these settings currently means that it can + * only react to new OS account instances events when an ingest job with + * this module enabled is running. 
+ */ + CaseEventListener.setCreateOsAcctCorrAttrs(saveCorrelationAttrs); + CaseEventListener.setFlagPrevSeenOsAccts(flagSeenDevices); } /** diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java index 7bac3a73a6..bdcb0afece 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java @@ -190,8 +190,6 @@ final class CentralRepoFileIngestModule implements FileIngestModule { @Override public void shutDown() { - IngestEventsListener.decrementCorrelationEngineModuleCount(); - if ((CentralRepository.isEnabled() == false) || (eamCase == null) || (eamDataSource == null)) { return; } @@ -227,8 +225,6 @@ final class CentralRepoFileIngestModule implements FileIngestModule { public void startUp(IngestJobContext context) throws IngestModuleException { this.context = context; - IngestEventsListener.incrementCorrelationEngineModuleCount(); - if (CentralRepository.isEnabled() == false) { /* * Not throwing the customary exception for now. 
This is a From 25ca78351854059b277557654b6cec80024a3fc4 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Mon, 1 Nov 2021 17:14:14 -0400 Subject: [PATCH 003/142] 7895 CR data artifact ingest module --- .../eventlisteners/CaseEventListener.java | 40 ++++----------- .../ingestmodule/Bundle.properties-MERGED | 3 ++ .../CentralRepoDataArtifactIngestModule.java | 51 ++++++++++--------- 3 files changed, 41 insertions(+), 53 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java index 92cfcde595..b3da5c0274 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java @@ -96,8 +96,8 @@ public final class CaseEventListener implements PropertyChangeListener { Case.Events.CURRENT_CASE, Case.Events.DATA_SOURCE_NAME_CHANGED, Case.Events.OS_ACCT_INSTANCES_ADDED); - private static final int MAX_PREV_CASES_FOR_NOTABLE_SCORE = 10; - private static final int MAX_PREV_CASES_FOR_PREV_SEEN = 20; + private static final int MAX_PREV_CASES_FOR_NOTABLE_SCORE = 10; // Also appears in CentralRepoDataArtifactIngestModule + private static final int MAX_PREV_CASES_FOR_PREV_SEEN = 20; // Also appears in CentralRepoDataArtifactIngestModule private static final AtomicBoolean createOSAcctCorrAttrs = new AtomicBoolean(); private static final AtomicBoolean flagPreviouslySeenOSAccts = new AtomicBoolean(); private final ExecutorService jobProcessingExecutor; @@ -118,7 +118,7 @@ public final class CaseEventListener implements PropertyChangeListener { * * @return flag True or false. 
*/ - public static boolean createOsAcctCorrAttrs() { + public static boolean getCreateOsAcctCorrAttrs() { return createOSAcctCorrAttrs.get(); } @@ -138,7 +138,7 @@ public final class CaseEventListener implements PropertyChangeListener { * * @return flag True or false. */ - public static boolean flagPrevSeenOsAccts() { + public static boolean getFlagPrevSeenOsAccts() { return flagPreviouslySeenOSAccts.get(); } @@ -279,9 +279,6 @@ public final class CaseEventListener implements PropertyChangeListener { } } - /** - * A task RJCTODO - */ private final class ContentTagTask implements Runnable { private final CentralRepository dbManager; @@ -403,9 +400,6 @@ public final class CaseEventListener implements PropertyChangeListener { } } - /** - * A task RJCTODO - */ private final class ArtifactTagTask implements Runnable { private final CentralRepository dbManager; @@ -525,9 +519,6 @@ public final class CaseEventListener implements PropertyChangeListener { } - /** - * A task RJCTODO - */ private final class TagDefinitionChangeTask implements Runnable { private final PropertyChangeEvent event; @@ -639,9 +630,6 @@ public final class CaseEventListener implements PropertyChangeListener { } //TAG_STATUS_CHANGED } - /** - * A task RJCTODO - */ private final class DataSourceAddedTask implements Runnable { private final CentralRepository dbManager; @@ -679,9 +667,6 @@ public final class CaseEventListener implements PropertyChangeListener { } // DATA_SOURCE_ADDED } - /** - * A task RJCTODO - */ private final class CurrentCaseTask implements Runnable { private final CentralRepository dbManager; @@ -719,10 +704,9 @@ public final class CaseEventListener implements PropertyChangeListener { } /** - * A task RJCTODO - * - * Add OsAccount Instance to CR and find interesting items based on the - * OsAccount + * Adds OS account instances to the central repository and creates + * previously seen analysis results for them if the instances have been seen 
*/ @NbBundle.Messages({"CaseEventsListener.module.name=Central Repository", "CaseEventsListener.prevCaseComment.text=Users seen in previous cases", @@ -740,7 +724,7 @@ public final class CaseEventListener implements PropertyChangeListener { @Override public void run() { - if (!createOsAcctCorrAttrs() && !flagPrevSeenOsAccts()) { + if (!getCreateOsAcctCorrAttrs() && !getFlagPrevSeenOsAccts()) { return; } final OsAcctInstancesAddedEvent osAcctInstancesAddedEvent = (OsAcctInstancesAddedEvent) event; @@ -755,13 +739,13 @@ public final class CaseEventListener implements PropertyChangeListener { Optional accountAddr = osAccount.getAddr(); try { - if (createOsAcctCorrAttrs()) { + if (getCreateOsAcctCorrAttrs()) { for (CorrelationAttributeInstance correlationAttributeInstance : correlationAttributeInstances) { dbManager.addArtifactInstance(correlationAttributeInstance); } } - if (flagPrevSeenOsAccts()) { + if (getFlagPrevSeenOsAccts()) { CorrelationAttributeInstance instanceWithTypeValue = null; for (CorrelationAttributeInstance instance : correlationAttributeInstances) { if (instance.getCorrelationType().getId() == CorrelationAttributeInstance.OSACCOUNT_TYPE_ID) { @@ -784,7 +768,6 @@ public final class CaseEventListener implements PropertyChangeListener { // calculate score Score score; int numCases = caseDisplayNames.size(); - // RJCTODO: Centralize constants (consider) if (numCases <= MAX_PREV_CASES_FOR_NOTABLE_SCORE) { score = Score.SCORE_LIKELY_NOTABLE; } else if (numCases > MAX_PREV_CASES_FOR_NOTABLE_SCORE && numCases <= MAX_PREV_CASES_FOR_PREV_SEEN) { @@ -834,9 +817,6 @@ public final class CaseEventListener implements PropertyChangeListener { } } - /** - * RJCTODO - */ private final class DataSourceNameChangedTask implements Runnable { private final CentralRepository dbManager; diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/Bundle.properties-MERGED 
index 179469aae4..be81f65cb9 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/Bundle.properties-MERGED @@ -9,6 +9,9 @@ CentralRepoIngestModule_notable_message_header=A file in this data source CentralRepoIngestModule_postToBB_knownBadMsg=Notable: {0} CentralRepoIngestModuleFactory.ingestmodule.desc=Saves properties to the central repository for later correlation CentralRepoIngestModuleFactory.ingestmodule.name=Central Repository +CrDataArtifactIngestModule_crInaccessibleErrMsg=Error accessing central repository +CrDataArtifactIngestModule_crNotEnabledErrMsg=Central repository required, but not enabled +CrDataArtifactIngestModule_noCurrentCaseErrMsg=Error getting current case # {0} - list of cases CrDataArtifactIngestModule_notableJustification=Previously marked as notable in cases {0} CrDataArtifactIngestModule_notableSetName=Previously Tagged As Notable (Central Repository) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java index f90a74dd83..ab7f476684 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java @@ -66,12 +66,12 @@ import org.sleuthkit.datamodel.TskData; */ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestModule { - private static final Logger logger = Logger.getLogger(CorrelationAttributeInstance.class.getName()); + private static final Logger LOGGER = Logger.getLogger(CorrelationAttributeInstance.class.getName()); private static final String MODULE_NAME = CentralRepoIngestModuleFactory.getModuleName(); - private static final int MAX_PREV_CASES_FOR_NOTABLE_SCORE = 10; - 
private static final int MAX_PREV_CASES_FOR_PREV_SEEN = 20; - private final Set corrAttrsAlreadyProcessed = new LinkedHashSet<>(); - private final boolean saveCorrelationAttrs; + private static final int MAX_PREV_CASES_FOR_NOTABLE_SCORE = 10; // Also appears in CaseEventListener + private static final int MAX_PREV_CASES_FOR_PREV_SEEN = 20; // Also appears in CaseEventListener + private final Set corrAttrsCreated; + private final boolean saveCorrAttrs; private final boolean flagNotableItems; private final boolean flagSeenDevices; private final boolean flagUniqueArtifacts; @@ -91,27 +91,32 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo * @param settings The ingest job settings for this module. */ CentralRepoDataArtifactIngestModule(IngestSettings settings) { - saveCorrelationAttrs = settings.shouldCreateCorrelationProperties(); + corrAttrsCreated = new LinkedHashSet<>(); + saveCorrAttrs = settings.shouldCreateCorrelationProperties(); flagNotableItems = settings.isFlagTaggedNotableItems(); flagSeenDevices = settings.isFlagPreviousDevices(); flagUniqueArtifacts = settings.isFlagUniqueArtifacts(); } + @NbBundle.Messages({ + "CrDataArtifactIngestModule_crNotEnabledErrMsg=Central repository required, but not enabled", + "CrDataArtifactIngestModule_noCurrentCaseErrMsg=Error getting current case", + "CrDataArtifactIngestModule_crInaccessibleErrMsg=Error accessing central repository",}) @Override public void startUp(IngestJobContext context) throws IngestModuleException { dataSource = context.getDataSource(); ingestJobId = context.getJobId(); if (!CentralRepository.isEnabled()) { - throw new IngestModuleException("Central repository required, but not enabled"); + throw new IngestModuleException(Bundle.CrDataArtifactIngestModule_crNotEnabledErrMsg()); // May be displayed to user. 
} try { currentCase = Case.getCurrentCaseThrows(); blackboard = currentCase.getSleuthkitCase().getBlackboard(); centralRepo = CentralRepository.getInstance(); } catch (NoCurrentCaseException ex) { - throw new IngestModuleException("Error getting current case", ex); + throw new IngestModuleException(Bundle.CrDataArtifactIngestModule_noCurrentCaseErrMsg(), ex); // May be displayed to user. } catch (CentralRepoException ex) { - throw new IngestModuleException("Error accessing central repository", ex); + throw new IngestModuleException(Bundle.CrDataArtifactIngestModule_crInaccessibleErrMsg(), ex); // May be displayed to user. } /* * Pass the relevant ingest job settings on to the case events listener @@ -120,7 +125,7 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo * account instances events when an ingest job with this module enabled * is running. */ - CaseEventListener.setCreateOsAcctCorrAttrs(saveCorrelationAttrs); + CaseEventListener.setCreateOsAcctCorrAttrs(saveCorrAttrs); CaseEventListener.setFlagPrevSeenOsAccts(flagSeenDevices); } @@ -128,7 +133,7 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo public ProcessResult process(DataArtifact artifact) { List corrAttrs = CorrelationAttributeUtil.makeCorrAttrsToSave(artifact); for (CorrelationAttributeInstance corrAttr : corrAttrs) { - if (!corrAttrsAlreadyProcessed.add(corrAttr.toString())) { + if (!corrAttrsCreated.add(corrAttr.toString())) { continue; } @@ -136,11 +141,11 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo makeAnalysisResults(artifact, corrAttr); } - if (saveCorrelationAttrs) { + if (saveCorrAttrs) { try { centralRepo.addAttributeInstanceBulk(corrAttr); } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, String.format("Error doing bulk add of correlation attribute to central repository (%s) ", corrAttr), ex); //NON-NLS + LOGGER.log(Level.SEVERE, String.format("Error doing bulk add of 
correlation attribute to central repository (%s) ", corrAttr), ex); //NON-NLS } } } @@ -224,15 +229,15 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo try { previousOccurrences = centralRepo.getArtifactInstancesByTypeValue(corrAttr.getCorrelationType(), corrAttr.getCorrelationValue()); for (Iterator iterator = previousOccurrences.iterator(); iterator.hasNext();) { - CorrelationAttributeInstance instance = iterator.next(); - if (instance.getCorrelationCase().getCaseUUID().equals(corrAttr.getCorrelationCase().getCaseUUID())) { + CorrelationAttributeInstance prevOccurrence = iterator.next(); + if (prevOccurrence.getCorrelationCase().getCaseUUID().equals(corrAttr.getCorrelationCase().getCaseUUID())) { iterator.remove(); } } } catch (CorrelationAttributeNormalizationException ex) { - logger.log(Level.SEVERE, String.format("Error normalizing correlation attribute value (s)", corrAttr), ex); // NON-NLS + LOGGER.log(Level.SEVERE, String.format("Error normalizing correlation attribute value (s)", corrAttr), ex); // NON-NLS } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, String.format("Error getting previous occurences of correlation attribute (s)", corrAttr), ex); // NON-NLS + LOGGER.log(Level.SEVERE, String.format("Error getting previous occurences of correlation attribute (s)", corrAttr), ex); // NON-NLS } return previousOccurrences; } @@ -338,11 +343,11 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo try { blackboard.postArtifact(analysisResult, CentralRepoIngestModuleFactory.getModuleName(), ingestJobId); } catch (Blackboard.BlackboardException ex) { - logger.log(Level.SEVERE, String.format("Error posting analysis result to blackboard (*s)", analysisResult), ex); //NON-NLS + LOGGER.log(Level.SEVERE, String.format("Error posting analysis result to blackboard (*s)", analysisResult), ex); //NON-NLS } } } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error creating analysis 
result", ex); // NON-NLS + LOGGER.log(Level.SEVERE, "Error creating analysis result", ex); // NON-NLS } } @@ -351,7 +356,7 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo try { centralRepo.commitAttributeInstancesBulk(); } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error doing final bulk commit of correlation attributes", ex); // NON-NLS + LOGGER.log(Level.SEVERE, "Error doing final bulk commit of correlation attributes", ex); // NON-NLS } /* * Data artifact ingest modules are shut down at the end of the ingest @@ -367,7 +372,7 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo * only react to new OS account instances events when an ingest job with * this module enabled is running. */ - CaseEventListener.setCreateOsAcctCorrAttrs(saveCorrelationAttrs); + CaseEventListener.setCreateOsAcctCorrAttrs(saveCorrAttrs); CaseEventListener.setFlagPrevSeenOsAccts(flagSeenDevices); } @@ -420,9 +425,9 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo } } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, String.format("Error fetching data from the central repository for data source '%s' (obj_id=%d)", dataSource.getName(), dataSource.getId()), ex); + LOGGER.log(Level.SEVERE, String.format("Error fetching data from the central repository for data source '%s' (obj_id=%d)", dataSource.getName(), dataSource.getId()), ex); } catch (TskCoreException ex) { - logger.log(Level.SEVERE, String.format("Error fetching data from the case database for data source '%s' (obj_id=%d)", dataSource.getName(), dataSource.getId()), ex); + LOGGER.log(Level.SEVERE, String.format("Error fetching data from the case database for data source '%s' (obj_id=%d)", dataSource.getName(), dataSource.getId()), ex); } } From 24c72955873169a2649db9582eb2e3a3f880ed37 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Mon, 1 Nov 2021 17:24:48 -0400 Subject: [PATCH 004/142] 7895 CR data 
artifact ingest module --- .../ingestmodule/CentralRepoFileIngestModule.java | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java index bdcb0afece..debab4eaad 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java @@ -84,7 +84,6 @@ final class CentralRepoFileIngestModule implements FileIngestModule { private final boolean flagTaggedNotableItems; private Blackboard blackboard; private final boolean createCorrelationProperties; - private IngestJobContext context; /** * Instantiate the Central Repository ingest module. @@ -223,8 +222,6 @@ final class CentralRepoFileIngestModule implements FileIngestModule { }) @Override public void startUp(IngestJobContext context) throws IngestModuleException { - this.context = context; - if (CentralRepository.isEnabled() == false) { /* * Not throwing the customary exception for now. 
This is a @@ -331,8 +328,7 @@ final class CentralRepoFileIngestModule implements FileIngestModule { null, Bundle.CentralRepoIngestModule_prevTaggedSet_text(), justification, attributes) .getAnalysisResult(); try { - // index the artifact for keyword search - blackboard.postArtifact(tifArtifact, MODULE_NAME, context.getJobId()); + blackboard.postArtifact(tifArtifact, MODULE_NAME, jobId); } catch (Blackboard.BlackboardException ex) { logger.log(Level.SEVERE, "Unable to index blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS } From b0c926bd19f1ad9fb4a56f0d8418dcc5da826c7b Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 2 Nov 2021 11:19:23 -0400 Subject: [PATCH 005/142] 7895 CR data artifact ingest module --- .../CentralRepoDataArtifactIngestModule.java | 29 +++++----- .../CentralRepoFileIngestModule.java | 56 +++++++++---------- .../ingestmodule/IngestSettings.java | 14 +++-- 3 files changed, 47 insertions(+), 52 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java index ab7f476684..80b710c494 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java @@ -66,10 +66,7 @@ import org.sleuthkit.datamodel.TskData; */ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestModule { - private static final Logger LOGGER = Logger.getLogger(CorrelationAttributeInstance.class.getName()); - private static final String MODULE_NAME = CentralRepoIngestModuleFactory.getModuleName(); - private static final int MAX_PREV_CASES_FOR_NOTABLE_SCORE = 10; // Also appears in CaseEventListener - private static final int MAX_PREV_CASES_FOR_PREV_SEEN = 20; // Also appears in CaseEventListener + private 
static final Logger LOGGER = Logger.getLogger(CentralRepoDataArtifactIngestModule.class.getName()); private final Set corrAttrsCreated; private final boolean saveCorrAttrs; private final boolean flagNotableItems; @@ -259,10 +256,10 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo private void makePreviousNotableAnalysisResult(DataArtifact artifact, List previousCases, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue) { String prevCases = previousCases.stream().distinct().collect(Collectors.joining(",")); String justification = Bundle.CrDataArtifactIngestModule_notableJustification(prevCases); - Collection attributes = Arrays.asList(new BlackboardAttribute(TSK_SET_NAME, MODULE_NAME, Bundle.CrDataArtifactIngestModule_notableSetName()), - new BlackboardAttribute(TSK_CORRELATION_TYPE, MODULE_NAME, corrAttrType.getDisplayName()), - new BlackboardAttribute(TSK_CORRELATION_VALUE, MODULE_NAME, corrAttrValue), - new BlackboardAttribute(TSK_OTHER_CASES, MODULE_NAME, prevCases)); + Collection attributes = Arrays.asList(new BlackboardAttribute(TSK_SET_NAME, CentralRepoIngestModuleFactory.getModuleName(), Bundle.CrDataArtifactIngestModule_notableSetName()), + new BlackboardAttribute(TSK_CORRELATION_TYPE, CentralRepoIngestModuleFactory.getModuleName(), corrAttrType.getDisplayName()), + new BlackboardAttribute(TSK_CORRELATION_VALUE, CentralRepoIngestModuleFactory.getModuleName(), corrAttrValue), + new BlackboardAttribute(TSK_OTHER_CASES, CentralRepoIngestModuleFactory.getModuleName(), prevCases)); makeAndPostAnalysisResult(artifact, BlackboardArtifact.Type.TSK_PREVIOUSLY_NOTABLE, attributes, "", Score.SCORE_NOTABLE, justification); } @@ -284,9 +281,9 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo private void makePreviouslySeenAnalysisResult(DataArtifact artifact, List previousCases, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue) { Score score; int numCases = 
previousCases.size(); - if (numCases <= MAX_PREV_CASES_FOR_NOTABLE_SCORE) { + if (numCases <= AnalysisParams.MAX_PREV_CASES_FOR_NOTABLE_SCORE) { score = Score.SCORE_LIKELY_NOTABLE; - } else if (numCases > MAX_PREV_CASES_FOR_NOTABLE_SCORE && numCases <= MAX_PREV_CASES_FOR_PREV_SEEN) { + } else if (numCases > AnalysisParams.MAX_PREV_CASES_FOR_NOTABLE_SCORE && numCases <= AnalysisParams.MAX_PREV_CASES_FOR_PREV_SEEN) { score = Score.SCORE_NONE; } else { /* @@ -298,10 +295,10 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo String prevCases = previousCases.stream().distinct().collect(Collectors.joining(",")); String justification = Bundle.CrDataArtifactIngestModule_prevSeenJustification(prevCases); Collection analysisResultAttributes = Arrays.asList( - new BlackboardAttribute(TSK_SET_NAME, MODULE_NAME, Bundle.CrDataArtifactIngestModule_prevSeenSetName()), - new BlackboardAttribute(TSK_CORRELATION_TYPE, MODULE_NAME, corrAttrType.getDisplayName()), - new BlackboardAttribute(TSK_CORRELATION_VALUE, MODULE_NAME, corrAttrValue), - new BlackboardAttribute(TSK_OTHER_CASES, MODULE_NAME, prevCases)); + new BlackboardAttribute(TSK_SET_NAME, CentralRepoIngestModuleFactory.getModuleName(), Bundle.CrDataArtifactIngestModule_prevSeenSetName()), + new BlackboardAttribute(TSK_CORRELATION_TYPE, CentralRepoIngestModuleFactory.getModuleName(), corrAttrType.getDisplayName()), + new BlackboardAttribute(TSK_CORRELATION_VALUE, CentralRepoIngestModuleFactory.getModuleName(), corrAttrValue), + new BlackboardAttribute(TSK_OTHER_CASES, CentralRepoIngestModuleFactory.getModuleName(), prevCases)); makeAndPostAnalysisResult(artifact, BlackboardArtifact.Type.TSK_PREVIOUSLY_SEEN, analysisResultAttributes, "", score, justification); } @@ -317,10 +314,10 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo }) private void makeAndPostPreviouslyUnseenArtifact(DataArtifact artifact, CorrelationAttributeInstance.Type corrAttrType, String 
corrAttrValue) { Collection attributesForNewArtifact = Arrays.asList(new BlackboardAttribute( - TSK_CORRELATION_TYPE, MODULE_NAME, + TSK_CORRELATION_TYPE, CentralRepoIngestModuleFactory.getModuleName(), corrAttrType.getDisplayName()), new BlackboardAttribute( - TSK_CORRELATION_VALUE, MODULE_NAME, + TSK_CORRELATION_VALUE, CentralRepoIngestModuleFactory.getModuleName(), corrAttrValue)); makeAndPostAnalysisResult(artifact, BlackboardArtifact.Type.TSK_PREVIOUSLY_UNSEEN, attributesForNewArtifact, "", Score.SCORE_LIKELY_NOTABLE, Bundle.CrDataArtifactIngestModule_prevUnseenJustification()); } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java index debab4eaad..365d1cfd9e 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java @@ -67,13 +67,7 @@ import org.sleuthkit.datamodel.Score; "CentralRepoIngestModule.prevCaseComment.text=Previous Case: "}) final class CentralRepoFileIngestModule implements FileIngestModule { - private static final String MODULE_NAME = CentralRepoIngestModuleFactory.getModuleName(); - static final boolean DEFAULT_FLAG_TAGGED_NOTABLE_ITEMS = false; - static final boolean DEFAULT_FLAG_PREVIOUS_DEVICES = false; - static final boolean DEFAULT_FLAG_UNIQUE_DEVICES = false; - static final boolean DEFAULT_CREATE_CR_PROPERTIES = true; - - private final static Logger logger = Logger.getLogger(CentralRepoFileIngestModule.class.getName()); + private final static Logger LOGGER = Logger.getLogger(CentralRepoFileIngestModule.class.getName()); private final IngestServices services = IngestServices.getInstance(); private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter(); private static final IngestModuleReferenceCounter 
warningMsgRefCounter = new IngestModuleReferenceCounter(); @@ -110,7 +104,7 @@ final class CentralRepoFileIngestModule implements FileIngestModule { try { blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard(); } catch (NoCurrentCaseException ex) { - logger.log(Level.SEVERE, "Exception while getting open case.", ex); + LOGGER.log(Level.SEVERE, "Exception while getting open case.", ex); return ProcessResult.ERROR; } @@ -126,7 +120,7 @@ final class CentralRepoFileIngestModule implements FileIngestModule { try { dbManager = CentralRepository.getInstance(); } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error connecting to Central Repository database.", ex); + LOGGER.log(Level.SEVERE, "Error connecting to Central Repository database.", ex); return ProcessResult.ERROR; } @@ -154,10 +148,10 @@ final class CentralRepoFileIngestModule implements FileIngestModule { postCorrelatedBadFileToBlackboard(abstractFile, caseDisplayNamesList, filesType, md5); } } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error searching database for artifact.", ex); // NON-NLS + LOGGER.log(Level.SEVERE, "Error searching database for artifact.", ex); // NON-NLS return ProcessResult.ERROR; } catch (CorrelationAttributeNormalizationException ex) { - logger.log(Level.INFO, "Error searching database for artifact.", ex); // NON-NLS + LOGGER.log(Level.INFO, "Error searching database for artifact.", ex); // NON-NLS return ProcessResult.ERROR; } } @@ -177,10 +171,10 @@ final class CentralRepoFileIngestModule implements FileIngestModule { abstractFile.getId()); dbManager.addAttributeInstanceBulk(cefi); } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error adding artifact to bulk artifacts.", ex); // NON-NLS + LOGGER.log(Level.SEVERE, "Error adding artifact to bulk artifacts.", ex); // NON-NLS return ProcessResult.ERROR; } catch (CorrelationAttributeNormalizationException ex) { - logger.log(Level.INFO, "Error adding artifact to bulk 
artifacts.", ex); // NON-NLS + LOGGER.log(Level.INFO, "Error adding artifact to bulk artifacts.", ex); // NON-NLS return ProcessResult.ERROR; } } @@ -196,19 +190,19 @@ final class CentralRepoFileIngestModule implements FileIngestModule { try { dbManager = CentralRepository.getInstance(); } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error connecting to Central Repository database.", ex); + LOGGER.log(Level.SEVERE, "Error connecting to Central Repository database.", ex); return; } try { dbManager.commitAttributeInstancesBulk(); } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error doing bulk insert of artifacts.", ex); // NON-NLS + LOGGER.log(Level.SEVERE, "Error doing bulk insert of artifacts.", ex); // NON-NLS } try { Long count = dbManager.getCountArtifactInstancesByCaseDataSource(eamDataSource); - logger.log(Level.INFO, "{0} artifacts in db for case: {1} ds:{2}", new Object[]{count, eamCase.getDisplayName(), eamDataSource.getName()}); // NON-NLS + LOGGER.log(Level.INFO, "{0} artifacts in db for case: {1} ds:{2}", new Object[]{count, eamCase.getDisplayName(), eamDataSource.getName()}); // NON-NLS } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error counting artifacts.", ex); // NON-NLS + LOGGER.log(Level.SEVERE, "Error counting artifacts.", ex); // NON-NLS } // TODO: once we implement shared cache, if refCounter is 1, then submit data in bulk. 
@@ -241,14 +235,14 @@ final class CentralRepoFileIngestModule implements FileIngestModule { try { autopsyCase = Case.getCurrentCaseThrows(); } catch (NoCurrentCaseException ex) { - logger.log(Level.SEVERE, "Exception while getting open case.", ex); + LOGGER.log(Level.SEVERE, "Exception while getting open case.", ex); throw new IngestModuleException("Exception while getting open case.", ex); } // Don't allow sqlite central repo databases to be used for multi user cases if ((autopsyCase.getCaseType() == Case.CaseType.MULTI_USER_CASE) && (CentralRepoDbManager.getSavedDbChoice().getDbPlatform() == CentralRepoPlatforms.SQLITE)) { - logger.log(Level.SEVERE, "Cannot run Central Repository ingest module on a multi-user case with a SQLite central repository."); + LOGGER.log(Level.SEVERE, "Cannot run Central Repository ingest module on a multi-user case with a SQLite central repository."); throw new IngestModuleException("Cannot run on a multi-user case with a SQLite central repository."); // NON-NLS } jobId = context.getJobId(); @@ -257,14 +251,14 @@ final class CentralRepoFileIngestModule implements FileIngestModule { try { centralRepoDb = CentralRepository.getInstance(); } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error connecting to central repository database.", ex); // NON-NLS + LOGGER.log(Level.SEVERE, "Error connecting to central repository database.", ex); // NON-NLS throw new IngestModuleException("Error connecting to central repository database.", ex); // NON-NLS } try { filesType = centralRepoDb.getCorrelationTypeById(CorrelationAttributeInstance.FILES_TYPE_ID); } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error getting correlation type FILES in ingest module start up.", ex); // NON-NLS + LOGGER.log(Level.SEVERE, "Error getting correlation type FILES in ingest module start up.", ex); // NON-NLS throw new IngestModuleException("Error getting correlation type FILES in ingest module start up.", ex); // NON-NLS } @@ -277,7 
+271,7 @@ final class CentralRepoFileIngestModule implements FileIngestModule { try { eamDataSource = CorrelationDataSource.fromTSKDataSource(eamCase, context.getDataSource()); } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error getting data source info.", ex); // NON-NLS + LOGGER.log(Level.SEVERE, "Error getting data source info.", ex); // NON-NLS throw new IngestModuleException("Error getting data source info.", ex); // NON-NLS } // TODO: once we implement a shared cache, load/init it here w/ syncronized and define reference counter @@ -291,7 +285,7 @@ final class CentralRepoFileIngestModule implements FileIngestModule { centralRepoDb.newDataSource(eamDataSource); } } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error adding data source to Central Repository.", ex); // NON-NLS + LOGGER.log(Level.SEVERE, "Error adding data source to Central Repository.", ex); // NON-NLS throw new IngestModuleException("Error adding data source to Central Repository.", ex); // NON-NLS } @@ -309,16 +303,16 @@ final class CentralRepoFileIngestModule implements FileIngestModule { String justification = "Previously marked as notable in cases " + prevCases; Collection attributes = Arrays.asList( new BlackboardAttribute( - TSK_SET_NAME, MODULE_NAME, + TSK_SET_NAME, CentralRepoIngestModuleFactory.getModuleName(), Bundle.CentralRepoIngestModule_prevTaggedSet_text()), new BlackboardAttribute( - TSK_CORRELATION_TYPE, MODULE_NAME, + TSK_CORRELATION_TYPE, CentralRepoIngestModuleFactory.getModuleName(), aType.getDisplayName()), new BlackboardAttribute( - TSK_CORRELATION_VALUE, MODULE_NAME, + TSK_CORRELATION_VALUE, CentralRepoIngestModuleFactory.getModuleName(), value), new BlackboardAttribute( - TSK_OTHER_CASES, MODULE_NAME, + TSK_OTHER_CASES, CentralRepoIngestModuleFactory.getModuleName(), prevCases)); try { // Create artifact if it doesn't already exist. 
@@ -328,17 +322,17 @@ final class CentralRepoFileIngestModule implements FileIngestModule { null, Bundle.CentralRepoIngestModule_prevTaggedSet_text(), justification, attributes) .getAnalysisResult(); try { - blackboard.postArtifact(tifArtifact, MODULE_NAME, jobId); + blackboard.postArtifact(tifArtifact, CentralRepoIngestModuleFactory.getModuleName(), jobId); } catch (Blackboard.BlackboardException ex) { - logger.log(Level.SEVERE, "Unable to index blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS + LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS } // send inbox message sendBadFileInboxMessage(tifArtifact, abstractFile.getName(), abstractFile.getMd5Hash(), caseDisplayNames); } } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Failed to create BlackboardArtifact.", ex); // NON-NLS + LOGGER.log(Level.SEVERE, "Failed to create BlackboardArtifact.", ex); // NON-NLS } catch (IllegalStateException ex) { - logger.log(Level.SEVERE, "Failed to create BlackboardAttribute.", ex); // NON-NLS + LOGGER.log(Level.SEVERE, "Failed to create BlackboardAttribute.", ex); // NON-NLS } } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/IngestSettings.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/IngestSettings.java index a977da13f2..027403490b 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/IngestSettings.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/IngestSettings.java @@ -26,7 +26,11 @@ import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings; final class IngestSettings implements IngestModuleIngestJobSettings { private static final long serialVersionUID = 1L; - + static final boolean DEFAULT_FLAG_TAGGED_NOTABLE_ITEMS = false; + static final boolean DEFAULT_FLAG_PREVIOUS_DEVICES = false; + static final boolean DEFAULT_FLAG_UNIQUE_DEVICES = false; + static final boolean 
DEFAULT_CREATE_CR_PROPERTIES = true; + private final boolean flagTaggedNotableItems; private final boolean flagPreviousDevices; private final boolean createCorrelationProperties; @@ -36,10 +40,10 @@ final class IngestSettings implements IngestModuleIngestJobSettings { * Instantiate the ingest job settings with default values. */ IngestSettings() { - this.flagTaggedNotableItems = CentralRepoFileIngestModule.DEFAULT_FLAG_TAGGED_NOTABLE_ITEMS; - this.flagPreviousDevices = CentralRepoFileIngestModule.DEFAULT_FLAG_PREVIOUS_DEVICES; - this.createCorrelationProperties = CentralRepoFileIngestModule.DEFAULT_CREATE_CR_PROPERTIES; - this.flagUniqueArtifacts = CentralRepoFileIngestModule.DEFAULT_FLAG_UNIQUE_DEVICES; + this.flagTaggedNotableItems = DEFAULT_FLAG_TAGGED_NOTABLE_ITEMS; + this.flagPreviousDevices = DEFAULT_FLAG_PREVIOUS_DEVICES; + this.createCorrelationProperties = DEFAULT_CREATE_CR_PROPERTIES; + this.flagUniqueArtifacts = DEFAULT_FLAG_UNIQUE_DEVICES; } /** From 04c92eb22a8dc373d07b3dbd550c2048f27fd562 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 2 Nov 2021 11:34:06 -0400 Subject: [PATCH 006/142] 7895 CR data artifact ingest module --- .../ingestmodule/CentralRepoDataArtifactIngestModule.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java index 80b710c494..ebfb1a67d5 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java @@ -350,6 +350,8 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo @Override public void shutDown() { + analyzeOsAccounts(); + try { centralRepo.commitAttributeInstancesBulk(); } catch (CentralRepoException 
ex) { @@ -372,7 +374,7 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo CaseEventListener.setCreateOsAcctCorrAttrs(saveCorrAttrs); CaseEventListener.setFlagPrevSeenOsAccts(flagSeenDevices); } - + /** * Ensures the data source in the central repository has hash values that * match those in the case database. From a35d7b556b5cbd5f1fad1cd8c0e189ef2cf34cc1 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 2 Nov 2021 11:34:25 -0400 Subject: [PATCH 007/142] 7895 CR data artifact ingest module --- .../ingestmodule/AnalysisParams.java | 33 +++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100755 Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/AnalysisParams.java diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/AnalysisParams.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/AnalysisParams.java new file mode 100755 index 0000000000..0c3c70619b --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/AnalysisParams.java @@ -0,0 +1,33 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021-2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.centralrepository.ingestmodule; + +/** + * A collection of analysis parameter constants used by the central repository + * ingest modules / event listeners. 
+ */ +public class AnalysisParams { + + static final int MAX_PREV_CASES_FOR_NOTABLE_SCORE = 10; + static final int MAX_PREV_CASES_FOR_PREV_SEEN = 20; + + private AnalysisParams() { + } + +} From 2e23982effd4f26590e1bb95e2db7f5e4add3e86 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Thu, 4 Nov 2021 11:56:53 -0400 Subject: [PATCH 008/142] 7895 CR data artifact ingest module --- .../datamodel/CentralRepository.java | 23 +- .../datamodel/CorrelationAttributeUtil.java | 51 ++- .../datamodel/RdbmsCentralRepo.java | 44 +- .../eventlisteners/Bundle.properties-MERGED | 3 - .../eventlisteners/CaseEventListener.java | 185 +-------- .../ingestmodule/AnalysisParams.java | 33 -- .../ingestmodule/Bundle.properties-MERGED | 3 + .../CentralRepoDataArtifactIngestModule.java | 376 +++++++++++------- .../CentralRepoFileIngestModule.java | 2 +- .../SevenZipExtractor.java | 3 +- .../filetypeid/FileTypeIdIngestModule.java | 3 +- .../hashdatabase/HashDbIngestModule.java | 2 +- .../FilesIdentifierIngestModule.java | 3 +- .../pictureanalyzer/impls/EXIFProcessor.java | 4 +- 14 files changed, 333 insertions(+), 402 deletions(-) delete mode 100755 Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/AnalysisParams.java diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepository.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepository.java index 3db1fea511..6127209811 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepository.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepository.java @@ -310,18 +310,25 @@ public interface CentralRepository { List getArtifactInstancesByTypeValues(CorrelationAttributeInstance.Type aType, List values) throws CentralRepoException, CorrelationAttributeNormalizationException; /** - * Retrieves eamArtifact instances from the database that are associated - * with the eamArtifactType and eamArtifactValue of the given eamArtifact. 
+ * Retrieves correlation attribute instances from the central repository + * that match a given attribute type and value. * - * @param aType The type of the artifact - * @param value The correlation value + * @param type The correlation attribute type. + * @param value The correlation attribute value. * - * @return List of artifact instances for a given type/value + * @return The matching correlation attribute instances. * - * @throws CorrelationAttributeNormalizationException - * @throws CentralRepoException + * @throws CorrelationAttributeNormalizationException The exception is + * thrown if the supplied + * correlation attribute + * value cannot be + * normlaized. + * @throws CentralRepoException The exception is + * thrown if there is an + * error querying the + * central repository. */ - List getArtifactInstancesByTypeValue(CorrelationAttributeInstance.Type aType, String value) throws CentralRepoException, CorrelationAttributeNormalizationException; + List getArtifactInstancesByTypeValue(CorrelationAttributeInstance.Type type, String value) throws CentralRepoException, CorrelationAttributeNormalizationException; /** * Retrieves eamArtifact instances from the database that are associated diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeUtil.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeUtil.java index 4243a36b51..624e668494 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeUtil.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeUtil.java @@ -114,8 +114,55 @@ public class CorrelationAttributeUtil { return Collections.emptyList(); } - public static List makeCorrAttrsToSave(OsAccountInstance osAccountInstance) { - return makeCorrAttrsForSearch(osAccountInstance); + /** + * Gets the correlation attributes for an OS account. + * + * @param account The OS account. 
+ * + * @return The correlation attributes. + */ + public static List makeCorrAttrsToSave(OsAccount account) { + List correlationAttrs = new ArrayList<>(); + if (CentralRepository.isEnabled()) { + Optional accountAddr = account.getAddr(); + if (accountAddr.isPresent() && !isSystemOsAccount(accountAddr.get())) { + try { + Content dataSource = account.getDataSource(); + CorrelationCase correlationCase = CentralRepository.getInstance().getCase(Case.getCurrentCaseThrows()); + CorrelationAttributeInstance correlationAttributeInstance = new CorrelationAttributeInstance( + CentralRepository.getInstance().getCorrelationTypeById(CorrelationAttributeInstance.OSACCOUNT_TYPE_ID), + accountAddr.get(), + correlationCase, + CorrelationDataSource.fromTSKDataSource(correlationCase, dataSource), + dataSource.getName(), + "", + TskData.FileKnown.KNOWN, + account.getId()); + correlationAttrs.add(correlationAttributeInstance); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Error getting data source for OS account '%s'", accountAddr.get()), ex); //NON-NLS + } catch (CentralRepoException ex) { + logger.log(Level.SEVERE, String.format("Error querying central repository for OS account '%s'", accountAddr.get()), ex); //NON-NLS + } catch (NoCurrentCaseException ex) { + logger.log(Level.SEVERE, String.format("Error getting current case for OS account '%s'", accountAddr.get()), ex); //NON-NLS + } catch (CorrelationAttributeNormalizationException ex) { + logger.log(Level.SEVERE, String.format("Error normalizing correlation attribute for OS account '%s'", accountAddr.get()), ex); //NON-NLS + } + } + } + return correlationAttrs; + } + + /** + * Determines whether or not a given OS account address is a system account + * address. + * + * @param accountAddr The OS account address. + * + * @return True ofr false. 
+ */ + private static boolean isSystemOsAccount(String accountAddr) { + return accountAddr.equals("S-1-5-18") || accountAddr.equals("S-1-5-19") || accountAddr.equals("S-1-5-20"); } /** diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/RdbmsCentralRepo.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/RdbmsCentralRepo.java index 1b37217355..d7fd79dd8e 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/RdbmsCentralRepo.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/RdbmsCentralRepo.java @@ -1289,7 +1289,7 @@ abstract class RdbmsCentralRepo implements CentralRepository { if (values == null || values.isEmpty()) { throw new CorrelationAttributeNormalizationException("Cannot get artifact instances without specified values"); } - return getArtifactInstances(prepareGetInstancesSql(aType, values), aType); + return getCorrAttrInstances(prepareGetInstancesSql(aType, values), aType); } @Override @@ -1312,7 +1312,7 @@ abstract class RdbmsCentralRepo implements CentralRepository { inValuesBuilder.append(sql); inValuesBuilder.append(caseIds.stream().map(String::valueOf).collect(Collectors.joining("', '"))); inValuesBuilder.append("')"); - return getArtifactInstances(inValuesBuilder.toString(), aType); + return getCorrAttrInstances(inValuesBuilder.toString(), aType); } /** @@ -1361,40 +1361,44 @@ abstract class RdbmsCentralRepo implements CentralRepository { } /** - * Retrieves eamArtifact instances from the database that are associated - * with the eamArtifactType and eamArtifactValues of the given eamArtifact. + * Retrieves correlation attribute instances from the central repository + * that match a given SQL query and correlation attribute type. * - * @param aType The type of the artifact - * @param values The list of correlation values to get - * CorrelationAttributeInstances for + * @param sql The SQL query. + * @param attrType The correlation attribute type. 
* - * @return List of artifact instances for a given type with the specified - * values + * @return The correlation attribute instanes. * - * @throws CorrelationAttributeNormalizationException - * @throws CentralRepoException + * @throws CorrelationAttributeNormalizationException The exception is + * thrown if the supplied + * correlation attribute + * value cannot be + * normlaized. + * @throws CentralRepoException The exception is + * thrown if there is an + * error querying the + * central repository. */ - private List getArtifactInstances(String sql, CorrelationAttributeInstance.Type aType) throws CorrelationAttributeNormalizationException, CentralRepoException { + private List getCorrAttrInstances(String sql, CorrelationAttributeInstance.Type attrType) throws CorrelationAttributeNormalizationException, CentralRepoException { + List corrAttrs = new ArrayList<>(); Connection conn = connect(); - List artifactInstances = new ArrayList<>(); - CorrelationAttributeInstance artifactInstance; PreparedStatement preparedStatement = null; ResultSet resultSet = null; try { preparedStatement = conn.prepareStatement(sql); resultSet = preparedStatement.executeQuery(); while (resultSet.next()) { - artifactInstance = getEamArtifactInstanceFromResultSet(resultSet, aType); - artifactInstances.add(artifactInstance); + CorrelationAttributeInstance corrAttr = getCorrAttrFromResultSet(resultSet, attrType); + corrAttrs.add(corrAttr); } } catch (SQLException ex) { - throw new CentralRepoException("Error getting artifact instances by artifactType and artifactValue.", ex); // NON-NLS + throw new CentralRepoException(String.format("Error getting correlation attributes using query %s", sql), ex); // NON-NLS } finally { CentralRepoDbUtil.closeResultSet(resultSet); CentralRepoDbUtil.closeStatement(preparedStatement); CentralRepoDbUtil.closeConnection(conn); } - return artifactInstances; + return corrAttrs; } /** @@ -1509,7 +1513,7 @@ abstract class RdbmsCentralRepo implements 
CentralRepository { PreparedStatement preparedStatement = null; String tableName = CentralRepoDbUtil.correlationTypeToInstanceTableName(instance.getCorrelationType()); ResultSet resultSet = null; - + try { if (correlationCaseId > 0 && sourceObjID != null && correlationDataSourceId > 0) { //The CorrelationCase is in the Central repository. @@ -3643,7 +3647,7 @@ abstract class RdbmsCentralRepo implements CentralRepository { * * @throws SQLException when an expected column name is not in the resultSet */ - private CorrelationAttributeInstance getEamArtifactInstanceFromResultSet(ResultSet resultSet, CorrelationAttributeInstance.Type aType) throws SQLException, CentralRepoException, CorrelationAttributeNormalizationException { + private CorrelationAttributeInstance getCorrAttrFromResultSet(ResultSet resultSet, CorrelationAttributeInstance.Type aType) throws SQLException, CentralRepoException, CorrelationAttributeNormalizationException { if (null == resultSet) { return null; } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/Bundle.properties-MERGED index 3b1dde1896..909275d224 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/Bundle.properties-MERGED @@ -1,7 +1,4 @@ caseeventlistener.evidencetag=Evidence -CaseEventsListener.module.name=Central Repository -CaseEventsListener.prevCaseComment.text=Users seen in previous cases -CaseEventsListener.prevExists.text=Previously Seen Users (Central Repository) CentralRepositoryNotificationDialog.bulletHeader=This data is used to: CentralRepositoryNotificationDialog.bulletOne=Ignore common items (files, domains, and accounts) CentralRepositoryNotificationDialog.bulletThree=Create personas that group accounts diff --git 
a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java index b3da5c0274..a45c4a47d7 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java @@ -22,19 +22,13 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; import java.util.EnumSet; import java.util.List; -import java.util.Optional; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.logging.Level; -import java.util.stream.Collectors; import org.apache.commons.lang.StringUtils; -import org.openide.util.NbBundle; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; @@ -44,7 +38,6 @@ import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent; import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent; import org.sleuthkit.autopsy.casemodule.events.DataSourceAddedEvent; import org.sleuthkit.autopsy.casemodule.events.DataSourceNameChangedEvent; -import org.sleuthkit.autopsy.casemodule.events.OsAcctInstancesAddedEvent; import org.sleuthkit.autopsy.casemodule.services.TagsManager; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance; @@ -62,21 +55,10 @@ import org.sleuthkit.datamodel.TagName; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository; -import 
org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException; import org.sleuthkit.datamodel.Tag; import org.sleuthkit.autopsy.events.AutopsyEvent; import org.sleuthkit.datamodel.AnalysisResult; -import org.sleuthkit.datamodel.Blackboard; -import org.sleuthkit.datamodel.BlackboardAttribute; -import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_OTHER_CASES; -import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME; -import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_TYPE; -import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_VALUE; import org.sleuthkit.datamodel.DataArtifact; -import org.sleuthkit.datamodel.OsAccount; -import org.sleuthkit.datamodel.OsAccountInstance; -import org.sleuthkit.datamodel.Score; -import org.sleuthkit.datamodel.SleuthkitCase; /** * An Autopsy events listener for case events relevant to the central @@ -94,54 +76,9 @@ public final class CaseEventListener implements PropertyChangeListener { Case.Events.DATA_SOURCE_ADDED, Case.Events.TAG_DEFINITION_CHANGED, Case.Events.CURRENT_CASE, - Case.Events.DATA_SOURCE_NAME_CHANGED, - Case.Events.OS_ACCT_INSTANCES_ADDED); - private static final int MAX_PREV_CASES_FOR_NOTABLE_SCORE = 10; // Also appears in CentralRepoDataArtifactIngestModule - private static final int MAX_PREV_CASES_FOR_PREV_SEEN = 20; // Also appears in CentralRepoDataArtifactIngestModule - private static final AtomicBoolean createOSAcctCorrAttrs = new AtomicBoolean(); - private static final AtomicBoolean flagPreviouslySeenOSAccts = new AtomicBoolean(); + Case.Events.DATA_SOURCE_NAME_CHANGED); private final ExecutorService jobProcessingExecutor; - /** - * Set whether or not central repository case event listeners should create - * correlation attributes for new OS Accounts. - * - * @param flag True or false. 
- */ - public static void setCreateOsAcctCorrAttrs(boolean flag) { - createOSAcctCorrAttrs.set(flag); - } - - /** - * Gets whether or not central repository case event listeners should create - * correlation attributes for new OS Accounts. - * - * @return flag True or false. - */ - public static boolean getCreateOsAcctCorrAttrs() { - return createOSAcctCorrAttrs.get(); - } - - /** - * Sets whether or not central repository case event listeners should create - * previously seen analyis results for OS accounts. - * - * @param flag True or false. - */ - public static void setFlagPrevSeenOsAccts(boolean flag) { - flagPreviouslySeenOSAccts.set(flag); - } - - /** - * Gets whether or not central repository case event listeners should create - * previously seen analyis results for OS accounts. - * - * @return flag True or false. - */ - public static boolean getFlagPrevSeenOsAccts() { - return flagPreviouslySeenOSAccts.get(); - } - /** * Contructs an Autopsy events listener for case events relevant to the * central repository. @@ -209,10 +146,8 @@ public final class CaseEventListener implements PropertyChangeListener { case DATA_SOURCE_NAME_CHANGED: jobProcessingExecutor.submit(new DataSourceNameChangedTask(centralRepo, evt)); break; - case OS_ACCT_INSTANCES_ADDED: { - jobProcessingExecutor.submit(new OsAccountInstancesAddedTask(centralRepo, evt)); - } - break; + default: + break; } } @@ -703,120 +638,6 @@ public final class CaseEventListener implements PropertyChangeListener { } // CURRENT_CASE } - /** - * Adds OS account instances to the central repository and creates - * previously seen analysis results for them if the instances havd been seen - * in other cases. 
- */ - @NbBundle.Messages({"CaseEventsListener.module.name=Central Repository", - "CaseEventsListener.prevCaseComment.text=Users seen in previous cases", - "CaseEventsListener.prevExists.text=Previously Seen Users (Central Repository)"}) - private final class OsAccountInstancesAddedTask implements Runnable { - - private final CentralRepository dbManager; - private final PropertyChangeEvent event; - private final String MODULE_NAME = Bundle.CaseEventsListener_module_name(); - - private OsAccountInstancesAddedTask(CentralRepository db, PropertyChangeEvent evt) { - dbManager = db; - event = evt; - } - - @Override - public void run() { - if (!getCreateOsAcctCorrAttrs() && !getFlagPrevSeenOsAccts()) { - return; - } - final OsAcctInstancesAddedEvent osAcctInstancesAddedEvent = (OsAcctInstancesAddedEvent) event; - List addedOsAccountNew = osAcctInstancesAddedEvent.getOsAccountInstances(); - for (OsAccountInstance osAccountInstance : addedOsAccountNew) { - try { - OsAccount osAccount = osAccountInstance.getOsAccount(); - List correlationAttributeInstances = CorrelationAttributeUtil.makeCorrAttrsToSave(osAccountInstance); - if (correlationAttributeInstances.isEmpty()) { - return; - } - - Optional accountAddr = osAccount.getAddr(); - try { - if (getCreateOsAcctCorrAttrs()) { - for (CorrelationAttributeInstance correlationAttributeInstance : correlationAttributeInstances) { - dbManager.addArtifactInstance(correlationAttributeInstance); - } - } - - if (getFlagPrevSeenOsAccts()) { - CorrelationAttributeInstance instanceWithTypeValue = null; - for (CorrelationAttributeInstance instance : correlationAttributeInstances) { - if (instance.getCorrelationType().getId() == CorrelationAttributeInstance.OSACCOUNT_TYPE_ID) { - instanceWithTypeValue = instance; - break; - } - } - - if (instanceWithTypeValue != null) { - List previousOccurences = dbManager.getArtifactInstancesByTypeValue(instanceWithTypeValue.getCorrelationType(), instanceWithTypeValue.getCorrelationValue()); - - for 
(CorrelationAttributeInstance instance : previousOccurences) { - //we can get the first instance here since the case for all attributes will be the same - if (!instance.getCorrelationCase().getCaseUUID().equals(instanceWithTypeValue.getCorrelationCase().getCaseUUID())) { - SleuthkitCase tskCase = osAccount.getSleuthkitCase(); - Blackboard blackboard = tskCase.getBlackboard(); - - List caseDisplayNames = dbManager.getListCasesHavingArtifactInstances(instanceWithTypeValue.getCorrelationType(), instanceWithTypeValue.getCorrelationValue()); - - // calculate score - Score score; - int numCases = caseDisplayNames.size(); - if (numCases <= MAX_PREV_CASES_FOR_NOTABLE_SCORE) { - score = Score.SCORE_LIKELY_NOTABLE; - } else if (numCases > MAX_PREV_CASES_FOR_NOTABLE_SCORE && numCases <= MAX_PREV_CASES_FOR_PREV_SEEN) { - score = Score.SCORE_NONE; - } else { - // don't make an Analysis Result, the artifact is too common. - continue; - } - - String prevCases = caseDisplayNames.stream().distinct().collect(Collectors.joining(",")); - String justification = "Previously seen in cases " + prevCases; - Collection attributesForNewArtifact = Arrays.asList( - new BlackboardAttribute( - TSK_SET_NAME, MODULE_NAME, - Bundle.CaseEventsListener_prevExists_text()), - new BlackboardAttribute( - TSK_CORRELATION_TYPE, MODULE_NAME, - instance.getCorrelationType().getDisplayName()), - new BlackboardAttribute( - TSK_CORRELATION_VALUE, MODULE_NAME, - instanceWithTypeValue.getCorrelationValue()), - new BlackboardAttribute( - TSK_OTHER_CASES, MODULE_NAME, - prevCases)); - BlackboardArtifact newAnalysisResult = osAccount.newAnalysisResult( - BlackboardArtifact.Type.TSK_PREVIOUSLY_SEEN, score, - null, Bundle.CaseEventsListener_prevExists_text(), justification, attributesForNewArtifact, osAccountInstance.getDataSource().getId()).getAnalysisResult(); - try { - blackboard.postArtifact(newAnalysisResult, MODULE_NAME, null); - break; - } catch (Blackboard.BlackboardException ex) { - LOGGER.log(Level.SEVERE, 
"Unable to index blackboard artifact " + newAnalysisResult.getArtifactID(), ex); //NON-NLS - } - } - } - } - } - } catch (CorrelationAttributeNormalizationException ex) { - LOGGER.log(Level.SEVERE, "Exception with Correlation Attribute Normalization.", ex); //NON-NLS - } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, String.format("Cannot get central repository for OsAccount: %s.", accountAddr.get()), ex); //NON-NLS - } - } catch (TskCoreException ex) { - LOGGER.log(Level.SEVERE, "Cannot get central repository for OsAccount: " + "OsAccount", ex); - } - } - } - } - private final class DataSourceNameChangedTask implements Runnable { private final CentralRepository dbManager; diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/AnalysisParams.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/AnalysisParams.java deleted file mode 100755 index 0c3c70619b..0000000000 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/AnalysisParams.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2021-2021 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.centralrepository.ingestmodule; - -/** - * A collection of analysis parameter constants used by the central repository - * ingest modules / event listeners. 
- */ -public class AnalysisParams { - - static final int MAX_PREV_CASES_FOR_NOTABLE_SCORE = 10; - static final int MAX_PREV_CASES_FOR_PREV_SEEN = 20; - - private AnalysisParams() { - } - -} diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/Bundle.properties-MERGED index be81f65cb9..f30a0b6309 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/Bundle.properties-MERGED @@ -15,8 +15,11 @@ CrDataArtifactIngestModule_noCurrentCaseErrMsg=Error getting current case # {0} - list of cases CrDataArtifactIngestModule_notableJustification=Previously marked as notable in cases {0} CrDataArtifactIngestModule_notableSetName=Previously Tagged As Notable (Central Repository) +CrDataArtifactIngestModule_osAcctMgrInaccessibleErrMsg=Error getting OS accounts manager # {0} - list of cases CrDataArtifactIngestModule_prevSeenJustification=Previously seen in cases {0} +CrDataArtifactIngestModule_prevSeenOsAcctConfig=Previously Seen Users (Central Repository) +CrDataArtifactIngestModule_prevSeenOsAcctSetName=Users seen in previous cases CrDataArtifactIngestModule_prevSeenSetName=Previously Seen (Central Repository) CrDataArtifactIngestModule_prevUnseenJustification=Previously seen in zero cases IngestSettingsPanel.ingestSettingsLabel.text=Ingest Settings diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java index ebfb1a67d5..a44c33391a 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java @@ -21,9 +21,11 @@ package 
org.sleuthkit.autopsy.centralrepository.ingestmodule; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; +import java.util.Optional; import java.util.Set; import java.util.logging.Level; import java.util.stream.Collectors; @@ -38,7 +40,6 @@ import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNor import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeUtil; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource; -import org.sleuthkit.autopsy.centralrepository.eventlisteners.CaseEventListener; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.DataArtifactIngestModule; import org.sleuthkit.autopsy.ingest.IngestJobContext; @@ -53,51 +54,58 @@ import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.DataArtifact; import org.sleuthkit.datamodel.Image; +import org.sleuthkit.datamodel.OsAccount; +import org.sleuthkit.datamodel.OsAccountManager; import org.sleuthkit.datamodel.Score; +import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; /** - * A data artifact ingest module that adds correlation attributes for a data - * artifact to the central repository and makes analysis results based on - * previous occurences. When the ingest job is completed, ensures the data - * source in the central repository has hash values that match those in the case - * database. + * A data artifact ingest module that adds correlation attributes for data + * artifacts and OS accounts to the central repository and makes analysis + * results based on previous occurences. 
When the ingest job is completed, + * ensures the data source in the central repository has hash values that match + * those in the case database. */ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestModule { private static final Logger LOGGER = Logger.getLogger(CentralRepoDataArtifactIngestModule.class.getName()); - private final Set corrAttrsCreated; + private static final int MAX_PREV_CASES_FOR_NOTABLE_SCORE = 10; + private static final int MAX_PREV_CASES_FOR_PREV_SEEN = 20; + private final Set corrAttrsAlreadyCreated; private final boolean saveCorrAttrs; private final boolean flagNotableItems; - private final boolean flagSeenDevices; + private final boolean flagPrevSeenDevices; private final boolean flagUniqueArtifacts; private Case currentCase; private Blackboard blackboard; + private OsAccountManager osAccountMgr; private CentralRepository centralRepo; private Content dataSource; private long ingestJobId; /** * Constructs a data artifact ingest module that adds correlation attributes - * for a data artifact to the central repository and makes analysis results - * based on previous occurences. When the ingest job is completed, ensures - * the data source in the central repository has hash values that match - * those in the case database. + * for data artifacts and OS accounts to the central repository and makes + * analysis results based on previous occurences. When the ingest job is + * completed, ensures the data source in the central repository has hash + * values that match those in the case database. * * @param settings The ingest job settings for this module. 
*/ CentralRepoDataArtifactIngestModule(IngestSettings settings) { - corrAttrsCreated = new LinkedHashSet<>(); + corrAttrsAlreadyCreated = new LinkedHashSet<>(); saveCorrAttrs = settings.shouldCreateCorrelationProperties(); flagNotableItems = settings.isFlagTaggedNotableItems(); - flagSeenDevices = settings.isFlagPreviousDevices(); + flagPrevSeenDevices = settings.isFlagPreviousDevices(); flagUniqueArtifacts = settings.isFlagUniqueArtifacts(); } @NbBundle.Messages({ "CrDataArtifactIngestModule_crNotEnabledErrMsg=Central repository required, but not enabled", "CrDataArtifactIngestModule_noCurrentCaseErrMsg=Error getting current case", + "CrDataArtifactIngestModule_osAcctMgrInaccessibleErrMsg=Error getting OS accounts manager", "CrDataArtifactIngestModule_crInaccessibleErrMsg=Error accessing central repository",}) @Override public void startUp(IngestJobContext context) throws IngestModuleException { @@ -108,83 +116,149 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo } try { currentCase = Case.getCurrentCaseThrows(); - blackboard = currentCase.getSleuthkitCase().getBlackboard(); + SleuthkitCase tskCase = currentCase.getSleuthkitCase(); + blackboard = tskCase.getBlackboard(); + osAccountMgr = tskCase.getOsAccountManager(); centralRepo = CentralRepository.getInstance(); } catch (NoCurrentCaseException ex) { throw new IngestModuleException(Bundle.CrDataArtifactIngestModule_noCurrentCaseErrMsg(), ex); // May be displayed to user. + } catch (TskCoreException ex) { + throw new IngestModuleException(Bundle.CrDataArtifactIngestModule_osAcctMgrInaccessibleErrMsg(), ex); // May be displayed to user. } catch (CentralRepoException ex) { throw new IngestModuleException(Bundle.CrDataArtifactIngestModule_crInaccessibleErrMsg(), ex); // May be displayed to user. } - /* - * Pass the relevant ingest job settings on to the case events listener - * for the central repository. 
Note that the listener's dependency on - * these settings currently means that it can only react to new OS - * account instances events when an ingest job with this module enabled - * is running. - */ - CaseEventListener.setCreateOsAcctCorrAttrs(saveCorrAttrs); - CaseEventListener.setFlagPrevSeenOsAccts(flagSeenDevices); } + /** + * Translates the attributes of a data artifact into central repository + * correlation attributes and uses them to create analysis results and new + * central repository correlation attribute instances, depending on ingest + * job settings. + * + * @param artifact The data artifact. + * + * @return An ingest module process result. + */ @Override public ProcessResult process(DataArtifact artifact) { List corrAttrs = CorrelationAttributeUtil.makeCorrAttrsToSave(artifact); for (CorrelationAttributeInstance corrAttr : corrAttrs) { - if (!corrAttrsCreated.add(corrAttr.toString())) { + if (!corrAttrsAlreadyCreated.add(corrAttr.toString())) { + /* + * This is a bit of a time saver. Uniqueness constraints in the + * central repository prevent creation of duplicate correlation + * attributes, so this saves no-op central repository insert + * attempts. 
+ */ continue; } - if (flagNotableItems || flagSeenDevices || flagUniqueArtifacts) { - makeAnalysisResults(artifact, corrAttr); - } + makeAnalysisResults(artifact, corrAttr); if (saveCorrAttrs) { try { centralRepo.addAttributeInstanceBulk(corrAttr); } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, String.format("Error doing bulk add of correlation attribute to central repository (%s) ", corrAttr), ex); //NON-NLS + LOGGER.log(Level.SEVERE, String.format("Error adding correlation attribute '%s' to central repository for data artifact '%s' (job ID=%d)", corrAttr, artifact, ingestJobId), ex); //NON-NLS } } } return ProcessResult.OK; } + @Override + public void shutDown() { + if (saveCorrAttrs || flagPrevSeenDevices) { + analyzeOsAccounts(); + } + if (saveCorrAttrs) { + try { + centralRepo.commitAttributeInstancesBulk(); + } catch (CentralRepoException ex) { + LOGGER.log(Level.SEVERE, String.format("Error doing final bulk commit of correlation attributes (job ID=%d)", ingestJobId), ex); // NON-NLS + } + } + syncDataSourceHashes(); + } + /** - * Makes analysis results for a data artifact based on previous occurences, + * Adds correlation attributes to the central repository for the OS accounts + * in the data source and creates previously seen analysis results for the + * accounts if they have been seen in other cases. 
+ */ + @NbBundle.Messages({ + "CrDataArtifactIngestModule_prevSeenOsAcctSetName=Users seen in previous cases", + "CrDataArtifactIngestModule_prevSeenOsAcctConfig=Previously Seen Users (Central Repository)" + }) + private void analyzeOsAccounts() { + try { + List osAccounts = osAccountMgr.getOsAccountsByDataSourceObjId(dataSource.getId()); + for (OsAccount osAccount : osAccounts) { + process(osAccount); + } + } catch (TskCoreException ex) { + LOGGER.log(Level.SEVERE, String.format("Error getting OS accounts for data source %s (job ID=%d)", dataSource, ingestJobId), ex); + } + } + + /** + * Translates the attributes of a OS account into central repository + * correlation attributes and uses them to create analysis results and new + * central repository correlation attribute instances, depending on ingest + * job settings. + * + * @param osAccount The OS account. + */ + private void process(OsAccount osAccount) { + List corrAttrs = CorrelationAttributeUtil.makeCorrAttrsToSave(osAccount); + for (CorrelationAttributeInstance corrAttr : corrAttrs) { + if (!corrAttrsAlreadyCreated.add(corrAttr.toString())) { + /* + * This is a bit of a time saver. Uniqueness constraints in the + * central repository prevent creation of duplicate correlation + * attributes, so this saves no-op central repository insert + * attempts. + */ + continue; + } + + makeAnalysisResults(osAccount, corrAttr); + + if (saveCorrAttrs) { + try { + centralRepo.addAttributeInstanceBulk(corrAttr); + } catch (CentralRepoException ex) { + LOGGER.log(Level.SEVERE, String.format("Error adding correlation attribute '%s' to central repository for OS account '%s' (job ID=%d)", corrAttr, osAccount, ingestJobId), ex); + } + } + } + } + + /** + * Makes analysis results for a data artifact based on previous occurrences, * if any, of a correlation attribute. * * @param artifact The data artifact. * @param corrAttr A correlation attribute for the data artifact. 
*/ private void makeAnalysisResults(DataArtifact artifact, CorrelationAttributeInstance corrAttr) { - List previousOccurrences = getPreviousOccurrences(corrAttr); - if (previousOccurrences.isEmpty()) { - return; - } - - /* - * Make a previously notable analysis result for the data artifact if - * the correlation attribute has been seen in another case and marked as - * notable (TskData.FileKnown.BAD). - */ + List previousOccurrences = null; if (flagNotableItems) { - List previousCaseNames = new ArrayList<>(); - for (CorrelationAttributeInstance occurrence : previousOccurrences) { - if (occurrence.getKnownStatus() == TskData.FileKnown.BAD) { - previousCaseNames.add(occurrence.getCorrelationCase().getDisplayName()); // Dups are removed later + previousOccurrences = getOccurrencesInOtherCases(corrAttr); + if (!previousOccurrences.isEmpty()) { + Set previousCases = new HashSet<>(); + for (CorrelationAttributeInstance occurrence : previousOccurrences) { + if (occurrence.getKnownStatus() == TskData.FileKnown.BAD) { + previousCases.add(occurrence.getCorrelationCase().getDisplayName()); + } + } + if (!previousCases.isEmpty()) { + makePrevNotableAnalysisResult(artifact, previousCases, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue()); } } - if (!previousCaseNames.isEmpty()) { - makePreviousNotableAnalysisResult(artifact, previousCaseNames, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue()); - } } - /* - * Make a previously seen analysis result result for the data artifact - * if the correlation attribute has been seen in another case and is a - * device or communication account attribute. 
- */ - if (flagSeenDevices && !previousOccurrences.isEmpty() + if (flagPrevSeenDevices && (corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.USBID_TYPE_ID || corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.ICCID_TYPE_ID || corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.IMEI_TYPE_ID @@ -192,24 +266,45 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo || corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.MAC_TYPE_ID || corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.EMAIL_TYPE_ID || corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.PHONE_TYPE_ID)) { - List previousCaseNames = new ArrayList<>(); - for (CorrelationAttributeInstance occurrence : previousOccurrences) { - previousCaseNames.add(occurrence.getCorrelationCase().getDisplayName()); // Dups are removed later + if (previousOccurrences == null) { + previousOccurrences = getOccurrencesInOtherCases(corrAttr); } - if (!previousCaseNames.isEmpty()) { - makePreviouslySeenAnalysisResult(artifact, previousCaseNames, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue()); + if (!previousOccurrences.isEmpty()) { + Set previousCases = getPreviousCases(previousOccurrences); + if (!previousCases.isEmpty()) { + makePrevSeenAnalysisResult(artifact, previousCases, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue()); + } } } - /* - * Make a previously unseen analysis result result for the data artifact - * if the correlation attribute has not been seen in another case and is - * an app name or domain name attribute. 
- */ if (flagUniqueArtifacts && (corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.INSTALLED_PROGS_TYPE_ID || corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.DOMAIN_TYPE_ID)) { - makeAndPostPreviouslyUnseenArtifact(artifact, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue()); + if (previousOccurrences == null) { + previousOccurrences = getOccurrencesInOtherCases(corrAttr); + } + if (previousOccurrences.isEmpty()) { + makePrevUnseenAnalysisResult(artifact, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue()); + } + } + } + + /** + * Makes analysis results for a data artifact based on previous occurrences, + * if any, of a correlation attribute. + * + * @param artifact The data artifact. + * @param corrAttr A correlation attribute for the data artifact. + */ + private void makeAnalysisResults(OsAccount osAccount, CorrelationAttributeInstance corrAttr) { + if (flagPrevSeenDevices) { + List previousOccurrences = getOccurrencesInOtherCases(corrAttr); + if (!previousOccurrences.isEmpty()) { + Set previousCases = getPreviousCases(previousOccurrences); + if (!previousCases.isEmpty()) { + makePrevSeenAnalysisResult(osAccount, previousCases, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue()); + } + } } } @@ -221,7 +316,7 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo * * @return The other occurrences of the correlation attribute. 
*/ - private List getPreviousOccurrences(CorrelationAttributeInstance corrAttr) { + private List getOccurrencesInOtherCases(CorrelationAttributeInstance corrAttr) { List previousOccurrences = new ArrayList<>(); try { previousOccurrences = centralRepo.getArtifactInstancesByTypeValue(corrAttr.getCorrelationType(), corrAttr.getCorrelationValue()); @@ -232,17 +327,33 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo } } } catch (CorrelationAttributeNormalizationException ex) { - LOGGER.log(Level.SEVERE, String.format("Error normalizing correlation attribute value (s)", corrAttr), ex); // NON-NLS + LOGGER.log(Level.SEVERE, String.format("Error normalizing correlation attribute value for 's' (job ID=%d)", corrAttr, ingestJobId), ex); // NON-NLS } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, String.format("Error getting previous occurences of correlation attribute (s)", corrAttr), ex); // NON-NLS + LOGGER.log(Level.SEVERE, String.format("Error getting previous occurences of correlation attribute 's' (job ID=%d)", corrAttr, ingestJobId), ex); // NON-NLS } return previousOccurrences; } /** - * Makes a previously notable analysis result for a data artifact. + * Gets a unique set of previous cases, represented by their names, from a + * list of previous occurrences of correlation attributes. * - * @param artifact The data artifact. + * @param previousOccurrences The correlations attributes. + * + * @return The names of the previous cases. + */ + private Set getPreviousCases(List previousOccurrences) { + Set previousCases = new HashSet<>(); + for (CorrelationAttributeInstance occurrence : previousOccurrences) { + previousCases.add(occurrence.getCorrelationCase().getDisplayName()); + } + return previousCases; + } + + /** + * Makes a previously notable analysis result for a content. + * + * @param content The content. * @param previousCases The names of the cases in which the artifact was * deemed notable. 
* @param corrAttrType The type of the matched correlation attribute. @@ -253,21 +364,21 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo "# {0} - list of cases", "CrDataArtifactIngestModule_notableJustification=Previously marked as notable in cases {0}" }) - private void makePreviousNotableAnalysisResult(DataArtifact artifact, List previousCases, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue) { - String prevCases = previousCases.stream().distinct().collect(Collectors.joining(",")); + private void makePrevNotableAnalysisResult(Content content, Set previousCases, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue) { + String prevCases = previousCases.stream().collect(Collectors.joining(",")); String justification = Bundle.CrDataArtifactIngestModule_notableJustification(prevCases); Collection attributes = Arrays.asList(new BlackboardAttribute(TSK_SET_NAME, CentralRepoIngestModuleFactory.getModuleName(), Bundle.CrDataArtifactIngestModule_notableSetName()), new BlackboardAttribute(TSK_CORRELATION_TYPE, CentralRepoIngestModuleFactory.getModuleName(), corrAttrType.getDisplayName()), new BlackboardAttribute(TSK_CORRELATION_VALUE, CentralRepoIngestModuleFactory.getModuleName(), corrAttrValue), new BlackboardAttribute(TSK_OTHER_CASES, CentralRepoIngestModuleFactory.getModuleName(), prevCases)); - makeAndPostAnalysisResult(artifact, BlackboardArtifact.Type.TSK_PREVIOUSLY_NOTABLE, attributes, "", Score.SCORE_NOTABLE, justification); + makeAndPostAnalysisResult(content, BlackboardArtifact.Type.TSK_PREVIOUSLY_NOTABLE, attributes, "", Score.SCORE_NOTABLE, justification); } /** - * Makes a previously seen analysis result for a data artifact, unless the - * artifact is too common. + * Makes a previously seen analysis result for a content, unless the content + * is too common. * - * @param artifact The data artifact. + * @param content The content. 
* @param previousCases The names of the cases in which the artifact was * previously seen. * @param corrAttrType The type of the matched correlation attribute. @@ -278,103 +389,82 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo "# {0} - list of cases", "CrDataArtifactIngestModule_prevSeenJustification=Previously seen in cases {0}" }) - private void makePreviouslySeenAnalysisResult(DataArtifact artifact, List previousCases, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue) { - Score score; - int numCases = previousCases.size(); - if (numCases <= AnalysisParams.MAX_PREV_CASES_FOR_NOTABLE_SCORE) { - score = Score.SCORE_LIKELY_NOTABLE; - } else if (numCases > AnalysisParams.MAX_PREV_CASES_FOR_NOTABLE_SCORE && numCases <= AnalysisParams.MAX_PREV_CASES_FOR_PREV_SEEN) { - score = Score.SCORE_NONE; - } else { - /* - * Don't make the analysis result, the artifact is too common. - */ - return; + private void makePrevSeenAnalysisResult(Content content, Set previousCases, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue) { + Optional score = calculateScore(previousCases.size()); + if (score.isPresent()) { + String prevCases = previousCases.stream().collect(Collectors.joining(",")); + String justification = Bundle.CrDataArtifactIngestModule_prevSeenJustification(prevCases); + Collection analysisResultAttributes = Arrays.asList( + new BlackboardAttribute(TSK_SET_NAME, CentralRepoIngestModuleFactory.getModuleName(), Bundle.CrDataArtifactIngestModule_prevSeenSetName()), + new BlackboardAttribute(TSK_CORRELATION_TYPE, CentralRepoIngestModuleFactory.getModuleName(), corrAttrType.getDisplayName()), + new BlackboardAttribute(TSK_CORRELATION_VALUE, CentralRepoIngestModuleFactory.getModuleName(), corrAttrValue), + new BlackboardAttribute(TSK_OTHER_CASES, CentralRepoIngestModuleFactory.getModuleName(), prevCases)); + makeAndPostAnalysisResult(content, BlackboardArtifact.Type.TSK_PREVIOUSLY_SEEN, 
analysisResultAttributes, "", score.get(), justification); } - - String prevCases = previousCases.stream().distinct().collect(Collectors.joining(",")); - String justification = Bundle.CrDataArtifactIngestModule_prevSeenJustification(prevCases); - Collection analysisResultAttributes = Arrays.asList( - new BlackboardAttribute(TSK_SET_NAME, CentralRepoIngestModuleFactory.getModuleName(), Bundle.CrDataArtifactIngestModule_prevSeenSetName()), - new BlackboardAttribute(TSK_CORRELATION_TYPE, CentralRepoIngestModuleFactory.getModuleName(), corrAttrType.getDisplayName()), - new BlackboardAttribute(TSK_CORRELATION_VALUE, CentralRepoIngestModuleFactory.getModuleName(), corrAttrValue), - new BlackboardAttribute(TSK_OTHER_CASES, CentralRepoIngestModuleFactory.getModuleName(), prevCases)); - makeAndPostAnalysisResult(artifact, BlackboardArtifact.Type.TSK_PREVIOUSLY_SEEN, analysisResultAttributes, "", score, justification); } /** - * Makes a previously unseen analysis result for a data artifact. + * Makes a previously unseen analysis result for a content. * - * @param artifact The data artifact. + * @param content The content. * @param corrAttrType The type of the new correlation attribute. * @param corrAttrValue The value of the new correlation attribute. 
*/ @NbBundle.Messages({ "CrDataArtifactIngestModule_prevUnseenJustification=Previously seen in zero cases" }) - private void makeAndPostPreviouslyUnseenArtifact(DataArtifact artifact, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue) { - Collection attributesForNewArtifact = Arrays.asList(new BlackboardAttribute( - TSK_CORRELATION_TYPE, CentralRepoIngestModuleFactory.getModuleName(), - corrAttrType.getDisplayName()), - new BlackboardAttribute( - TSK_CORRELATION_VALUE, CentralRepoIngestModuleFactory.getModuleName(), - corrAttrValue)); - makeAndPostAnalysisResult(artifact, BlackboardArtifact.Type.TSK_PREVIOUSLY_UNSEEN, attributesForNewArtifact, "", Score.SCORE_LIKELY_NOTABLE, Bundle.CrDataArtifactIngestModule_prevUnseenJustification()); + private void makePrevUnseenAnalysisResult(Content content, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue) { + Collection attributesForNewArtifact = Arrays.asList( + new BlackboardAttribute(TSK_CORRELATION_TYPE, CentralRepoIngestModuleFactory.getModuleName(), corrAttrType.getDisplayName()), + new BlackboardAttribute(TSK_CORRELATION_VALUE, CentralRepoIngestModuleFactory.getModuleName(), corrAttrValue)); + makeAndPostAnalysisResult(content, BlackboardArtifact.Type.TSK_PREVIOUSLY_UNSEEN, attributesForNewArtifact, "", Score.SCORE_LIKELY_NOTABLE, Bundle.CrDataArtifactIngestModule_prevUnseenJustification()); } /** - * Makes a new analysis result of a given type for a data artifact and posts - * it to the blackboard. + * Calculates a score based in a number of previous cases. * - * @param artifact The data artifact. + * @param numPreviousCases The number of previous cases. + * + * @return An Optional of a score, will be empty if there is no score + * because the number of previous cases is too high, indicating a + * common and therefore uninteresting item. 
+ */ + private Optional calculateScore(int numPreviousCases) { + Score score = null; + if (numPreviousCases <= MAX_PREV_CASES_FOR_NOTABLE_SCORE) { + score = Score.SCORE_LIKELY_NOTABLE; + } else if (numPreviousCases > MAX_PREV_CASES_FOR_NOTABLE_SCORE && numPreviousCases <= MAX_PREV_CASES_FOR_PREV_SEEN) { + score = Score.SCORE_NONE; + } + return Optional.ofNullable(score); + } + + /** + * Makes a new analysis result of a given type for a content and posts it to + * the blackboard. + * + * @param content The content. * @param analysisResultType The type of analysis result to make. * @param analysisResultAttrs The attributes of the new analysis result. * @param configuration The configuration for the new analysis result. * @param score The score for the new analysis result. * @param justification The justification for the new analysis result. */ - private void makeAndPostAnalysisResult(DataArtifact artifact, BlackboardArtifact.Type analysisResultType, Collection analysisResultAttrs, String configuration, Score score, String justification) { + private void makeAndPostAnalysisResult(Content content, BlackboardArtifact.Type analysisResultType, Collection analysisResultAttrs, String configuration, Score score, String justification) { try { - if (!blackboard.artifactExists(artifact, analysisResultType, analysisResultAttrs)) { - AnalysisResult analysisResult = artifact.newAnalysisResult(analysisResultType, score, null, configuration, justification, analysisResultAttrs).getAnalysisResult(); + if (!blackboard.artifactExists(content, analysisResultType, analysisResultAttrs)) { + AnalysisResult analysisResult = content.newAnalysisResult(analysisResultType, score, null, configuration, justification, analysisResultAttrs).getAnalysisResult(); try { blackboard.postArtifact(analysisResult, CentralRepoIngestModuleFactory.getModuleName(), ingestJobId); } catch (Blackboard.BlackboardException ex) { - LOGGER.log(Level.SEVERE, String.format("Error posting analysis result to blackboard 
(*s)", analysisResult), ex); //NON-NLS + LOGGER.log(Level.SEVERE, String.format("Error posting analysis result '%s' to blackboard for content 's' (job ID=%d)", analysisResult, content, ingestJobId), ex); //NON-NLS } } } catch (TskCoreException ex) { - LOGGER.log(Level.SEVERE, "Error creating analysis result", ex); // NON-NLS + LOGGER.log(Level.SEVERE, String.format("Error creating %s analysis result for content '%s' (job ID=%d)", analysisResultType, content, ingestJobId), ex); // NON-NLS } } - @Override - public void shutDown() { - analyzeOsAccounts(); - - try { - centralRepo.commitAttributeInstancesBulk(); - } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, "Error doing final bulk commit of correlation attributes", ex); // NON-NLS - } - /* - * Data artifact ingest modules are shut down at the end of the ingest - * job. Now that the job is complete, ensure that the data source in the - * central repository that corresponds to the data source for the ingest - * job has hash values that match those in the case database. - */ - syncDataSourceHashes(); - /* - * Clear the relevant ingest job settings that were passed on to the - * case events listener for the central repository. Note that the - * listener's dependency on these settings currently means that it can - * only react to new OS account instances events when an ingest job with - * this module enabled is running. - */ - CaseEventListener.setCreateOsAcctCorrAttrs(saveCorrAttrs); - CaseEventListener.setFlagPrevSeenOsAccts(flagSeenDevices); - } - /** * Ensures the data source in the central repository has hash values that * match those in the case database. 
@@ -424,9 +514,9 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo } } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, String.format("Error fetching data from the central repository for data source '%s' (obj_id=%d)", dataSource.getName(), dataSource.getId()), ex); + LOGGER.log(Level.SEVERE, String.format("Error fetching data from the central repository for data source '%s' (job ID=%d)", dataSource.getName(), ingestJobId), ex); } catch (TskCoreException ex) { - LOGGER.log(Level.SEVERE, String.format("Error fetching data from the case database for data source '%s' (obj_id=%d)", dataSource.getName(), dataSource.getId()), ex); + LOGGER.log(Level.SEVERE, String.format("Error fetching data from the case database for data source '%s' (job ID=%d)", dataSource.getName(), ingestJobId), ex); } } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java index 365d1cfd9e..600bb312f2 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java @@ -316,7 +316,7 @@ final class CentralRepoFileIngestModule implements FileIngestModule { prevCases)); try { // Create artifact if it doesn't already exist. 
- if (!blackboard.artifactExists(abstractFile, TSK_PREVIOUSLY_NOTABLE, attributes)) { + if (!blackboard.artifactExists(abstractFile, BlackboardArtifact.Type.TSK_PREVIOUSLY_NOTABLE, attributes)) { BlackboardArtifact tifArtifact = abstractFile.newAnalysisResult( BlackboardArtifact.Type.TSK_PREVIOUSLY_NOTABLE, Score.SCORE_NOTABLE, null, Bundle.CentralRepoIngestModule_prevTaggedSet_text(), justification, attributes) diff --git a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java index 574b034ab4..ae1533667b 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java +++ b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java @@ -69,7 +69,6 @@ import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; -import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT; import org.sleuthkit.datamodel.BlackboardAttribute; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION; @@ -327,7 +326,7 @@ class SevenZipExtractor { TSK_COMMENT, MODULE_NAME, details)); - if (!blackboard.artifactExists(archiveFile, TSK_INTERESTING_FILE_HIT, attributes)) { + if (!blackboard.artifactExists(archiveFile, BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT, attributes)) { BlackboardArtifact artifact = rootArchive.getArchiveFile().newAnalysisResult( BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT, Score.SCORE_LIKELY_NOTABLE, diff --git a/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java index 
734eedaf35..2112e996f1 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java @@ -36,7 +36,6 @@ import org.sleuthkit.autopsy.modules.filetypeid.CustomFileTypesManager.CustomFil import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; -import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT; import org.sleuthkit.datamodel.BlackboardAttribute; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME; @@ -194,7 +193,7 @@ public class FileTypeIdIngestModule implements FileIngestModule { Blackboard tskBlackboard = currentCase.getSleuthkitCase().getBlackboard(); // Create artifact if it doesn't already exist. - if (!tskBlackboard.artifactExists(file, TSK_INTERESTING_FILE_HIT, attributes)) { + if (!tskBlackboard.artifactExists(file, BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT, attributes)) { BlackboardArtifact artifact = file.newAnalysisResult( BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT, Score.SCORE_LIKELY_NOTABLE, null, fileType.getInterestingFilesSetName(), null, diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java index c89e7893c6..d8091efd92 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java @@ -439,7 +439,7 @@ public class HashDbIngestModule implements FileIngestModule { attributesList.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_SET_NAME, HashLookupModuleFactory.getModuleName(), db.getDisplayName())); try { Blackboard tskBlackboard = skCase.getBlackboard(); - if 
(tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT, attributesList) == false) { + if (tskBlackboard.artifactExists(file, BlackboardArtifact.Type.TSK_HASHSET_HIT, attributesList) == false) { postHashSetHitToBlackboard(file, file.getMd5Hash(), db, comment); } } catch (TskCoreException ex) { diff --git a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesIdentifierIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesIdentifierIngestModule.java index ec259e933a..6e53534f5f 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesIdentifierIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesIdentifierIngestModule.java @@ -39,7 +39,6 @@ import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; -import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT; import org.sleuthkit.datamodel.BlackboardAttribute; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME; @@ -142,7 +141,7 @@ final class FilesIdentifierIngestModule implements FileIngestModule { ); // Create artifact if it doesn't already exist. 
- if (!blackboard.artifactExists(file, TSK_INTERESTING_FILE_HIT, attributes)) { + if (!blackboard.artifactExists(file, BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT, attributes)) { BlackboardArtifact artifact = file.newAnalysisResult( BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT, Score.SCORE_LIKELY_NOTABLE, null, filesSet.getName(), null, diff --git a/Core/src/org/sleuthkit/autopsy/modules/pictureanalyzer/impls/EXIFProcessor.java b/Core/src/org/sleuthkit/autopsy/modules/pictureanalyzer/impls/EXIFProcessor.java index 2806931f69..06a8717d86 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/pictureanalyzer/impls/EXIFProcessor.java +++ b/Core/src/org/sleuthkit/autopsy/modules/pictureanalyzer/impls/EXIFProcessor.java @@ -48,7 +48,6 @@ import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.modules.pictureanalyzer.PictureAnalyzerIngestModuleFactory; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; -import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF; import org.sleuthkit.datamodel.BlackboardAttribute; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; import org.sleuthkit.datamodel.Content; @@ -68,7 +67,6 @@ import org.sleuthkit.datamodel.Score; public class EXIFProcessor implements PictureProcessor { private static final Logger logger = Logger.getLogger(EXIFProcessor.class.getName()); - private static final BlackboardArtifact.Type EXIF_METADATA = new BlackboardArtifact.Type(TSK_METADATA_EXIF); @Override @NbBundle.Messages({ @@ -151,7 +149,7 @@ public class EXIFProcessor implements PictureProcessor { final Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard(); - if (!attributes.isEmpty() && !blackboard.artifactExists(file, TSK_METADATA_EXIF, attributes)) { + if (!attributes.isEmpty() && !blackboard.artifactExists(file, BlackboardArtifact.Type.TSK_METADATA_EXIF, attributes)) { List artifacts = new 
ArrayList<>(); final BlackboardArtifact exifArtifact = (file.newAnalysisResult( BlackboardArtifact.Type.TSK_METADATA_EXIF, From bff3d4ddc776b2e1516f832fad625de49c7d0e9f Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Thu, 4 Nov 2021 12:23:43 -0400 Subject: [PATCH 009/142] 7895 CR data artifact ingest module --- .../datamodel/CorrelationAttributeUtil.java | 45 +++---------------- 1 file changed, 7 insertions(+), 38 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeUtil.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeUtil.java index 624e668494..1c5a5feb9a 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeUtil.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeUtil.java @@ -159,7 +159,7 @@ public class CorrelationAttributeUtil { * * @param accountAddr The OS account address. * - * @return True ofr false. + * @return True or false. 
*/ private static boolean isSystemOsAccount(String accountAddr) { return accountAddr.equals("S-1-5-18") || accountAddr.equals("S-1-5-19") || accountAddr.equals("S-1-5-20"); @@ -830,43 +830,12 @@ public class CorrelationAttributeUtil { public static List makeCorrAttrsForSearch(OsAccountInstance osAccountInst) { List correlationAttrs = new ArrayList<>(); - if (CentralRepository.isEnabled()) { - OsAccount account = null; - DataSource dataSource = null; - if (osAccountInst != null) { - try { - account = osAccountInst.getOsAccount(); - dataSource = osAccountInst.getDataSource(); - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error getting information from OsAccountInstance.", ex); - } - } - if (account != null && dataSource != null) { - Optional accountAddr = account.getAddr(); - // Check address if it is null or one of the ones below we want to ignore it since they will always be one a windows system - // and they are not unique - if (accountAddr.isPresent() && !accountAddr.get().equals("S-1-5-18") && !accountAddr.get().equals("S-1-5-19") && !accountAddr.get().equals("S-1-5-20")) { - try { - - CorrelationCase correlationCase = CentralRepository.getInstance().getCase(Case.getCurrentCaseThrows()); - CorrelationAttributeInstance correlationAttributeInstance = new CorrelationAttributeInstance( - CentralRepository.getInstance().getCorrelationTypeById(CorrelationAttributeInstance.OSACCOUNT_TYPE_ID), - accountAddr.get(), - correlationCase, - CorrelationDataSource.fromTSKDataSource(correlationCase, dataSource), - dataSource.getName(), - "", - TskData.FileKnown.KNOWN, - account.getId()); - correlationAttrs.add(correlationAttributeInstance); - } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, String.format("Cannot get central repository for OsAccount: %s.", accountAddr.get()), ex); //NON-NLS - } catch (NoCurrentCaseException ex) { - logger.log(Level.WARNING, "Exception while getting open case.", ex); //NON-NLS - } catch 
(CorrelationAttributeNormalizationException ex) { - logger.log(Level.SEVERE, "Exception with Correlation Attribute Normalization.", ex); //NON-NLS - } - } + if (CentralRepository.isEnabled() && osAccountInst != null) { + try { + OsAccount osAccount = osAccountInst.getOsAccount(); + correlationAttrs.addAll(makeCorrAttrsToSave(osAccount)); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Error getting OS account from OS account instance '%s'", osAccountInst), ex); } } return correlationAttrs; From e03ccd4150e6aba3975b1b24d5553fa0c9b188b6 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Thu, 4 Nov 2021 12:50:25 -0400 Subject: [PATCH 010/142] 7895 CR data artifact ingest module --- .../eventlisteners/CaseEventListener.java | 12 ++-- .../CentralRepoFileIngestModule.java | 61 +++++++++---------- 2 files changed, 36 insertions(+), 37 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java index a45c4a47d7..6610fcbd86 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java @@ -163,7 +163,7 @@ public final class CaseEventListener implements PropertyChangeListener { } /** - * Determines whether or not a tag definition has notable status. + * Determines whether or not a tag definition calls for notable status. * * @param tagDef The tag definition. * @@ -194,11 +194,11 @@ public final class CaseEventListener implements PropertyChangeListener { * Sets the notable (known) status of a central repository correlation * attribute corresponding to an artifact. * - * @param centralRepo The central repository. - * @param artifact The artifact. - * @param knownStatus The new notable status. + * @param centralRepo The central repository. + * @param artifact The artifact. 
+ * @param notableStatus The new notable status. */ - private static void setArtifactKnownStatus(CentralRepository centralRepo, BlackboardArtifact artifact, TskData.FileKnown knownStatus) { + private static void setArtifactKnownStatus(CentralRepository centralRepo, BlackboardArtifact artifact, TskData.FileKnown notableStatus) { List corrAttrInstances = new ArrayList<>(); if (artifact instanceof DataArtifact) { corrAttrInstances.addAll(CorrelationAttributeUtil.makeCorrAttrsForSearch((DataArtifact) artifact)); @@ -207,7 +207,7 @@ public final class CaseEventListener implements PropertyChangeListener { } for (CorrelationAttributeInstance corrAttrInstance : corrAttrInstances) { try { - centralRepo.setAttributeInstanceKnownStatus(corrAttrInstance, knownStatus); + centralRepo.setAttributeInstanceKnownStatus(corrAttrInstance, notableStatus); } catch (CentralRepoException ex) { LOGGER.log(Level.SEVERE, String.format("Error setting correlation attribute instance known status", corrAttrInstance), ex); //NON-NLS } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java index 600bb312f2..87d8a2db57 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java @@ -47,7 +47,6 @@ import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; -import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_PREVIOUSLY_NOTABLE; import org.sleuthkit.datamodel.BlackboardAttribute; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME; import static 
org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_TYPE; @@ -67,13 +66,13 @@ import org.sleuthkit.datamodel.Score; "CentralRepoIngestModule.prevCaseComment.text=Previous Case: "}) final class CentralRepoFileIngestModule implements FileIngestModule { - private final static Logger LOGGER = Logger.getLogger(CentralRepoFileIngestModule.class.getName()); + private final static Logger logger = Logger.getLogger(CentralRepoFileIngestModule.class.getName()); private final IngestServices services = IngestServices.getInstance(); private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter(); private static final IngestModuleReferenceCounter warningMsgRefCounter = new IngestModuleReferenceCounter(); private long jobId; - private CorrelationCase eamCase; - private CorrelationDataSource eamDataSource; + private CorrelationCase centralRepoCase; + private CorrelationDataSource centralRepoDataSource; private CorrelationAttributeInstance.Type filesType; private final boolean flagTaggedNotableItems; private Blackboard blackboard; @@ -104,7 +103,7 @@ final class CentralRepoFileIngestModule implements FileIngestModule { try { blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard(); } catch (NoCurrentCaseException ex) { - LOGGER.log(Level.SEVERE, "Exception while getting open case.", ex); + logger.log(Level.SEVERE, "Exception while getting open case.", ex); return ProcessResult.ERROR; } @@ -120,7 +119,7 @@ final class CentralRepoFileIngestModule implements FileIngestModule { try { dbManager = CentralRepository.getInstance(); } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, "Error connecting to Central Repository database.", ex); + logger.log(Level.SEVERE, "Error connecting to Central Repository database.", ex); return ProcessResult.ERROR; } @@ -148,10 +147,10 @@ final class CentralRepoFileIngestModule implements FileIngestModule { postCorrelatedBadFileToBlackboard(abstractFile, 
caseDisplayNamesList, filesType, md5); } } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, "Error searching database for artifact.", ex); // NON-NLS + logger.log(Level.SEVERE, "Error searching database for artifact.", ex); // NON-NLS return ProcessResult.ERROR; } catch (CorrelationAttributeNormalizationException ex) { - LOGGER.log(Level.INFO, "Error searching database for artifact.", ex); // NON-NLS + logger.log(Level.INFO, "Error searching database for artifact.", ex); // NON-NLS return ProcessResult.ERROR; } } @@ -162,8 +161,8 @@ final class CentralRepoFileIngestModule implements FileIngestModule { CorrelationAttributeInstance cefi = new CorrelationAttributeInstance( filesType, md5, - eamCase, - eamDataSource, + centralRepoCase, + centralRepoDataSource, abstractFile.getParentPath() + abstractFile.getName(), null, TskData.FileKnown.UNKNOWN // NOTE: Known status in the CR is based on tagging, not hashes like the Case Database. @@ -171,10 +170,10 @@ final class CentralRepoFileIngestModule implements FileIngestModule { abstractFile.getId()); dbManager.addAttributeInstanceBulk(cefi); } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, "Error adding artifact to bulk artifacts.", ex); // NON-NLS + logger.log(Level.SEVERE, "Error adding artifact to bulk artifacts.", ex); // NON-NLS return ProcessResult.ERROR; } catch (CorrelationAttributeNormalizationException ex) { - LOGGER.log(Level.INFO, "Error adding artifact to bulk artifacts.", ex); // NON-NLS + logger.log(Level.INFO, "Error adding artifact to bulk artifacts.", ex); // NON-NLS return ProcessResult.ERROR; } } @@ -183,26 +182,26 @@ final class CentralRepoFileIngestModule implements FileIngestModule { @Override public void shutDown() { - if ((CentralRepository.isEnabled() == false) || (eamCase == null) || (eamDataSource == null)) { + if ((CentralRepository.isEnabled() == false) || (centralRepoCase == null) || (centralRepoDataSource == null)) { return; } CentralRepository dbManager; try { 
dbManager = CentralRepository.getInstance(); } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, "Error connecting to Central Repository database.", ex); + logger.log(Level.SEVERE, "Error connecting to Central Repository database.", ex); return; } try { dbManager.commitAttributeInstancesBulk(); } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, "Error doing bulk insert of artifacts.", ex); // NON-NLS + logger.log(Level.SEVERE, "Error doing bulk insert of artifacts.", ex); // NON-NLS } try { - Long count = dbManager.getCountArtifactInstancesByCaseDataSource(eamDataSource); - LOGGER.log(Level.INFO, "{0} artifacts in db for case: {1} ds:{2}", new Object[]{count, eamCase.getDisplayName(), eamDataSource.getName()}); // NON-NLS + Long count = dbManager.getCountArtifactInstancesByCaseDataSource(centralRepoDataSource); + logger.log(Level.INFO, "{0} artifacts in db for case: {1} ds:{2}", new Object[]{count, centralRepoCase.getDisplayName(), centralRepoDataSource.getName()}); // NON-NLS } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, "Error counting artifacts.", ex); // NON-NLS + logger.log(Level.SEVERE, "Error counting artifacts.", ex); // NON-NLS } // TODO: once we implement shared cache, if refCounter is 1, then submit data in bulk. 
@@ -235,14 +234,14 @@ final class CentralRepoFileIngestModule implements FileIngestModule { try { autopsyCase = Case.getCurrentCaseThrows(); } catch (NoCurrentCaseException ex) { - LOGGER.log(Level.SEVERE, "Exception while getting open case.", ex); + logger.log(Level.SEVERE, "Exception while getting open case.", ex); throw new IngestModuleException("Exception while getting open case.", ex); } // Don't allow sqlite central repo databases to be used for multi user cases if ((autopsyCase.getCaseType() == Case.CaseType.MULTI_USER_CASE) && (CentralRepoDbManager.getSavedDbChoice().getDbPlatform() == CentralRepoPlatforms.SQLITE)) { - LOGGER.log(Level.SEVERE, "Cannot run Central Repository ingest module on a multi-user case with a SQLite central repository."); + logger.log(Level.SEVERE, "Cannot run Central Repository ingest module on a multi-user case with a SQLite central repository."); throw new IngestModuleException("Cannot run on a multi-user case with a SQLite central repository."); // NON-NLS } jobId = context.getJobId(); @@ -251,27 +250,27 @@ final class CentralRepoFileIngestModule implements FileIngestModule { try { centralRepoDb = CentralRepository.getInstance(); } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, "Error connecting to central repository database.", ex); // NON-NLS + logger.log(Level.SEVERE, "Error connecting to central repository database.", ex); // NON-NLS throw new IngestModuleException("Error connecting to central repository database.", ex); // NON-NLS } try { filesType = centralRepoDb.getCorrelationTypeById(CorrelationAttributeInstance.FILES_TYPE_ID); } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, "Error getting correlation type FILES in ingest module start up.", ex); // NON-NLS + logger.log(Level.SEVERE, "Error getting correlation type FILES in ingest module start up.", ex); // NON-NLS throw new IngestModuleException("Error getting correlation type FILES in ingest module start up.", ex); // NON-NLS } try { - eamCase 
= centralRepoDb.getCase(autopsyCase); + centralRepoCase = centralRepoDb.getCase(autopsyCase); } catch (CentralRepoException ex) { throw new IngestModuleException("Unable to get case from central repository database ", ex); } try { - eamDataSource = CorrelationDataSource.fromTSKDataSource(eamCase, context.getDataSource()); + centralRepoDataSource = CorrelationDataSource.fromTSKDataSource(centralRepoCase, context.getDataSource()); } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, "Error getting data source info.", ex); // NON-NLS + logger.log(Level.SEVERE, "Error getting data source info.", ex); // NON-NLS throw new IngestModuleException("Error getting data source info.", ex); // NON-NLS } // TODO: once we implement a shared cache, load/init it here w/ syncronized and define reference counter @@ -281,11 +280,11 @@ final class CentralRepoFileIngestModule implements FileIngestModule { == 1) { // ensure we have this data source in the EAM DB try { - if (null == centralRepoDb.getDataSource(eamCase, eamDataSource.getDataSourceObjectID())) { - centralRepoDb.newDataSource(eamDataSource); + if (null == centralRepoDb.getDataSource(centralRepoCase, centralRepoDataSource.getDataSourceObjectID())) { + centralRepoDb.newDataSource(centralRepoDataSource); } } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, "Error adding data source to Central Repository.", ex); // NON-NLS + logger.log(Level.SEVERE, "Error adding data source to Central Repository.", ex); // NON-NLS throw new IngestModuleException("Error adding data source to Central Repository.", ex); // NON-NLS } @@ -324,15 +323,15 @@ final class CentralRepoFileIngestModule implements FileIngestModule { try { blackboard.postArtifact(tifArtifact, CentralRepoIngestModuleFactory.getModuleName(), jobId); } catch (Blackboard.BlackboardException ex) { - LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS + logger.log(Level.SEVERE, "Unable to index 
blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS } // send inbox message sendBadFileInboxMessage(tifArtifact, abstractFile.getName(), abstractFile.getMd5Hash(), caseDisplayNames); } } catch (TskCoreException ex) { - LOGGER.log(Level.SEVERE, "Failed to create BlackboardArtifact.", ex); // NON-NLS + logger.log(Level.SEVERE, "Failed to create BlackboardArtifact.", ex); // NON-NLS } catch (IllegalStateException ex) { - LOGGER.log(Level.SEVERE, "Failed to create BlackboardAttribute.", ex); // NON-NLS + logger.log(Level.SEVERE, "Failed to create BlackboardAttribute.", ex); // NON-NLS } } From 15702450e20305447befb4c62ddd3286261e1abc Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Thu, 4 Nov 2021 13:06:49 -0400 Subject: [PATCH 011/142] 7895 CR data artifact ingest module --- .../ingestmodule/CentralRepoFileIngestModule.java | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java index 87d8a2db57..ab02f9d849 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java @@ -67,6 +67,7 @@ import org.sleuthkit.datamodel.Score; final class CentralRepoFileIngestModule implements FileIngestModule { private final static Logger logger = Logger.getLogger(CentralRepoFileIngestModule.class.getName()); + private static final String MODULE_NAME = CentralRepoIngestModuleFactory.getModuleName(); private final IngestServices services = IngestServices.getInstance(); private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter(); private static final IngestModuleReferenceCounter warningMsgRefCounter = new IngestModuleReferenceCounter(); @@ -302,16 +303,16 @@ final class 
CentralRepoFileIngestModule implements FileIngestModule { String justification = "Previously marked as notable in cases " + prevCases; Collection attributes = Arrays.asList( new BlackboardAttribute( - TSK_SET_NAME, CentralRepoIngestModuleFactory.getModuleName(), + TSK_SET_NAME, MODULE_NAME, Bundle.CentralRepoIngestModule_prevTaggedSet_text()), new BlackboardAttribute( - TSK_CORRELATION_TYPE, CentralRepoIngestModuleFactory.getModuleName(), + TSK_CORRELATION_TYPE, MODULE_NAME, aType.getDisplayName()), new BlackboardAttribute( - TSK_CORRELATION_VALUE, CentralRepoIngestModuleFactory.getModuleName(), + TSK_CORRELATION_VALUE, MODULE_NAME, value), new BlackboardAttribute( - TSK_OTHER_CASES, CentralRepoIngestModuleFactory.getModuleName(), + TSK_OTHER_CASES, MODULE_NAME, prevCases)); try { // Create artifact if it doesn't already exist. @@ -321,7 +322,7 @@ final class CentralRepoFileIngestModule implements FileIngestModule { null, Bundle.CentralRepoIngestModule_prevTaggedSet_text(), justification, attributes) .getAnalysisResult(); try { - blackboard.postArtifact(tifArtifact, CentralRepoIngestModuleFactory.getModuleName(), jobId); + blackboard.postArtifact(tifArtifact, MODULE_NAME, jobId); } catch (Blackboard.BlackboardException ex) { logger.log(Level.SEVERE, "Unable to index blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS } @@ -361,7 +362,7 @@ final class CentralRepoFileIngestModule implements FileIngestModule { detailsSb.append(str).append("
"); } detailsSb.append(""); - services.postMessage(IngestMessage.createDataMessage(CentralRepoIngestModuleFactory.getModuleName(), + services.postMessage(IngestMessage.createDataMessage(MODULE_NAME, Bundle.CentralRepoIngestModule_postToBB_knownBadMsg(name), detailsSb.toString(), name + md5Hash, From 826b772bb183e69f175e05344d77d0e6de4a6cec Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Thu, 4 Nov 2021 14:17:34 -0400 Subject: [PATCH 012/142] 7895 CR data artifact ingest module --- .../datamodel/CorrelationDataSource.java | 73 ++++---- .../ingestmodule/Bundle.properties-MERGED | 32 ++-- .../CentralRepoDataArtifactIngestModule.java | 59 +++--- .../CentralRepoFileIngestModule.java | 176 ++++++------------ 4 files changed, 143 insertions(+), 197 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationDataSource.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationDataSource.java index 92c23d77c8..9bbccb3317 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationDataSource.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationDataSource.java @@ -1,7 +1,7 @@ /* * Central Repository * - * Copyright 2015-2019 Basis Technology Corp. + * Copyright 2015-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -98,32 +98,33 @@ public class CorrelationDataSource implements Serializable { } /** - * Create a CorrelationDataSource object from a TSK Content object. This - * will add it to the central repository. + * Creates a central repository data source object from a case database data + * source. If the data source is not already present in the central + * repository, it is added. 
 * - * @param correlationCase the current CorrelationCase used for ensuring - * uniqueness of DataSource - * @param dataSource the sleuthkit datasource that is being added to - * the central repository + * @param correlationCase The central repository case associated with the + * data source. + * @param dataSource The case database data source. * - * @return + * @return The central repository data source. * - * @throws CentralRepoException + * @throws CentralRepoException This exception is thrown if there is an + * error creating the central repository data + * source. */ public static CorrelationDataSource fromTSKDataSource(CorrelationCase correlationCase, Content dataSource) throws CentralRepoException { + if (!CentralRepository.isEnabled()) { + throw new CentralRepoException("Central repository is not enabled, cannot create central repository data source."); + } + Case curCase; try { curCase = Case.getCurrentCaseThrows(); } catch (NoCurrentCaseException ex) { - throw new CentralRepoException("Autopsy case is closed"); - } - - CorrelationDataSource correlationDataSource = null; - boolean useCR = CentralRepository.isEnabled(); - if (useCR) { - correlationDataSource = CentralRepository.getInstance().getDataSource(correlationCase, dataSource.getId()); + throw new CentralRepoException("Error getting current case", ex); } + CorrelationDataSource correlationDataSource = CentralRepository.getInstance().getDataSource(correlationCase, dataSource.getId()); if (correlationDataSource == null) { String deviceId; String md5 = null; @@ -131,7 +132,7 @@ public class CorrelationDataSource implements Serializable { String sha256 = null; try { deviceId = curCase.getSleuthkitCase().getDataSource(dataSource.getId()).getDeviceId(); - + if (dataSource instanceof Image) { Image image = (Image) dataSource; md5 = image.getMd5(); @@ -139,15 +140,13 @@ public class CorrelationDataSource implements Serializable { sha256 = image.getSha256(); } } catch (TskDataException | TskCoreException ex)
{ - throw new CentralRepoException("Error getting data source info: " + ex.getMessage()); + throw new CentralRepoException("Error getting data source info from case database", ex); } correlationDataSource = new CorrelationDataSource(correlationCase, deviceId, dataSource.getName(), dataSource.getId(), md5, sha1, sha256); - if (useCR) { - //add the correlation data source to the central repository and fill in the Central repository data source id in the object - correlationDataSource = CentralRepository.getInstance().newDataSource(correlationDataSource); - } + correlationDataSource = CentralRepository.getInstance().newDataSource(correlationDataSource); } + return correlationDataSource; } @@ -205,66 +204,68 @@ public class CorrelationDataSource implements Serializable { public String getName() { return name; } - + /** * @return the MD5 hash value */ public String getMd5() { return (md5Hash == null ? "" : md5Hash); } - + /** - * Set the MD5 hash value and persist to the Central Repository if available. - * + * Set the MD5 hash value and persist to the Central Repository if + * available. + * * @param md5Hash The MD5 hash value. + * * @throws CentralRepoException If there's an issue updating the Central - Repository. + * Repository. */ public void setMd5(String md5Hash) throws CentralRepoException { this.md5Hash = md5Hash; - + if (dataSourceObjectID != -1) { CentralRepository.getInstance().updateDataSourceMd5Hash(this); } } - + /** * @return the SHA-1 hash value */ public String getSha1() { return (sha1Hash == null ? "" : sha1Hash); } - + /** * Set the SHA-1 hash value and persist to the Central Repository if * available. - * + * * @param sha1Hash The SHA-1 hash value. */ public void setSha1(String sha1Hash) throws CentralRepoException { this.sha1Hash = sha1Hash; - + if (dataSourceObjectID != -1) { CentralRepository.getInstance().updateDataSourceSha1Hash(this); } } - + /** * @return the SHA-256 hash value */ public String getSha256() { return (sha256Hash == null ? 
"" : sha256Hash); } - + /** * Set the SHA-256 hash value and persist to the Central Repository if * available. - * + * * @param sha256Hash The SHA-256 hash value. */ public void setSha256(String sha256Hash) throws CentralRepoException { this.sha256Hash = sha256Hash; - + if (dataSourceObjectID != -1) { CentralRepository.getInstance().updateDataSourceSha256Hash(this); } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/Bundle.properties-MERGED index f30a0b6309..de6d91c29f 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/Bundle.properties-MERGED @@ -1,27 +1,29 @@ CentralRepoIngestModel_name_header=Name:
CentralRepoIngestModel_previous_case_header=
Previous Cases:
-CentralRepoIngestModule.errorMessage.isNotEnabled=Central repository settings are not initialized, cannot run Central Repository ingest module. -CentralRepoIngestModule.notfyBubble.title=Central Repository Not Initialized CentralRepoIngestModule.prevCaseComment.text=Previous Case: CentralRepoIngestModule.prevTaggedSet.text=Previously Tagged As Notable (Central Repository) +CentralRepoIngestModule_cannotGetCrCaseErrMsg=Case not present in the central repository +CentralRepoIngestModule_cannotGetCrDataSourceErrMsg=Data source not present in the central repository +CentralRepoIngestModule_crDatabaseTypeMismatch=Multi-user cases require a PostgreSQL central repository +CentralRepoIngestModule_crInaccessibleErrMsg=Error accessing central repository +CentralRepoIngestModule_crNotEnabledErrMsg=Central repository required, but not enabled +CentralRepoIngestModule_missingFileCorrAttrTypeErrMsg=Correlation attribute type for files not found in the central repository +CentralRepoIngestModule_noCurrentCaseErrMsg=Error getting current case CentralRepoIngestModule_notable_message_header=A file in this data source was previously seen and tagged as Notable.
+# {0} - list of cases +CentralRepoIngestModule_notableJustification=Previously marked as notable in cases {0} +CentralRepoIngestModule_notableSetName=Previously Tagged As Notable (Central Repository) +CentralRepoIngestModule_osAcctMgrInaccessibleErrMsg=Error getting OS accounts manager # {0} - Name of file that is Notable CentralRepoIngestModule_postToBB_knownBadMsg=Notable: {0} +# {0} - list of cases +CentralRepoIngestModule_prevSeenJustification=Previously seen in cases {0} +CentralRepoIngestModule_prevSeenOsAcctConfig=Previously Seen Users (Central Repository) +CentralRepoIngestModule_prevSeenOsAcctSetName=Users seen in previous cases +CentralRepoIngestModule_prevSeenSetName=Previously Seen (Central Repository) +CentralRepoIngestModule_prevUnseenJustification=Previously seen in zero cases CentralRepoIngestModuleFactory.ingestmodule.desc=Saves properties to the central repository for later correlation CentralRepoIngestModuleFactory.ingestmodule.name=Central Repository -CrDataArtifactIngestModule_crInaccessibleErrMsg=Error accessing central repository -CrDataArtifactIngestModule_crNotEnabledErrMsg=Central repository required, but not enabled -CrDataArtifactIngestModule_noCurrentCaseErrMsg=Error getting current case -# {0} - list of cases -CrDataArtifactIngestModule_notableJustification=Previously marked as notable in cases {0} -CrDataArtifactIngestModule_notableSetName=Previously Tagged As Notable (Central Repository) -CrDataArtifactIngestModule_osAcctMgrInaccessibleErrMsg=Error getting OS accounts manager -# {0} - list of cases -CrDataArtifactIngestModule_prevSeenJustification=Previously seen in cases {0} -CrDataArtifactIngestModule_prevSeenOsAcctConfig=Previously Seen Users (Central Repository) -CrDataArtifactIngestModule_prevSeenOsAcctSetName=Users seen in previous cases -CrDataArtifactIngestModule_prevSeenSetName=Previously Seen (Central Repository) -CrDataArtifactIngestModule_prevUnseenJustification=Previously seen in zero cases 
IngestSettingsPanel.ingestSettingsLabel.text=Ingest Settings IngestSettingsPanel.flagTaggedNotableItemsCheckbox.text=Flag items previously tagged as notable IngestSettingsPanel.flagPreviouslySeenDevicesCheckbox.text=Flag devices and users previously seen in other cases diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java index a44c33391a..f1610659f9 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java @@ -33,7 +33,9 @@ import org.apache.commons.lang3.StringUtils; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; +import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoDbManager; import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException; +import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoPlatforms; import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException; @@ -103,16 +105,24 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo } @NbBundle.Messages({ - "CrDataArtifactIngestModule_crNotEnabledErrMsg=Central repository required, but not enabled", - "CrDataArtifactIngestModule_noCurrentCaseErrMsg=Error getting current case", - "CrDataArtifactIngestModule_osAcctMgrInaccessibleErrMsg=Error getting OS accounts manager", - "CrDataArtifactIngestModule_crInaccessibleErrMsg=Error accessing central repository",}) + "CentralRepoIngestModule_crNotEnabledErrMsg=Central 
repository required, but not enabled", + "CentralRepoIngestModule_noCurrentCaseErrMsg=Error getting current case", + "CentralRepoIngestModule_osAcctMgrInaccessibleErrMsg=Error getting OS accounts manager", + "CentralRepoIngestModule_crInaccessibleErrMsg=Error accessing central repository", + "CentralRepoIngestModule_crDatabaseTypeMismatch=Multi-user cases require a PostgreSQL central repository" + }) @Override public void startUp(IngestJobContext context) throws IngestModuleException { + /* + * IMPORTANT: Start up IngestModuleException messages are displayed to + * the user, if a user is present. Therefore, an exception to the policy + * that exception messages are not localized is appropriate here. Also, + * the exception messages should be user-friendly. + */ dataSource = context.getDataSource(); ingestJobId = context.getJobId(); if (!CentralRepository.isEnabled()) { - throw new IngestModuleException(Bundle.CrDataArtifactIngestModule_crNotEnabledErrMsg()); // May be displayed to user. + throw new IngestModuleException(Bundle.CentralRepoIngestModule_crNotEnabledErrMsg()); // May be displayed to user. } try { currentCase = Case.getCurrentCaseThrows(); @@ -121,12 +131,17 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo osAccountMgr = tskCase.getOsAccountManager(); centralRepo = CentralRepository.getInstance(); } catch (NoCurrentCaseException ex) { - throw new IngestModuleException(Bundle.CrDataArtifactIngestModule_noCurrentCaseErrMsg(), ex); // May be displayed to user. + throw new IngestModuleException(Bundle.CentralRepoIngestModule_noCurrentCaseErrMsg(), ex); } catch (TskCoreException ex) {
+ throw new IngestModuleException(Bundle.CentralRepoIngestModule_osAcctMgrInaccessibleErrMsg(), ex); } catch (CentralRepoException ex) { - throw new IngestModuleException(Bundle.CrDataArtifactIngestModule_crInaccessibleErrMsg(), ex); // May be displayed to user. + throw new IngestModuleException(Bundle.CentralRepoIngestModule_crInaccessibleErrMsg(), ex); } + // Don't allow sqlite central repo databases to be used for multi user cases + if ((currentCase.getCaseType() == Case.CaseType.MULTI_USER_CASE) && (CentralRepoDbManager.getSavedDbChoice().getDbPlatform() == CentralRepoPlatforms.SQLITE)) { + throw new IngestModuleException(Bundle.CentralRepoIngestModule_crDatabaseTypeMismatch()); + } + } /** @@ -187,8 +202,8 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo * accounts if they have been seen in other cases. */ @NbBundle.Messages({ - "CrDataArtifactIngestModule_prevSeenOsAcctSetName=Users seen in previous cases", - "CrDataArtifactIngestModule_prevSeenOsAcctConfig=Previously Seen Users (Central Repository)" + "CentralRepoIngestModule_prevSeenOsAcctSetName=Users seen in previous cases", + "CentralRepoIngestModule_prevSeenOsAcctConfig=Previously Seen Users (Central Repository)" }) private void analyzeOsAccounts() { try { @@ -360,14 +375,14 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo * @param corrAttrValue The value of the matched correlation attribute. 
*/ @NbBundle.Messages({ - "CrDataArtifactIngestModule_notableSetName=Previously Tagged As Notable (Central Repository)", + "CentralRepoIngestModule_notableSetName=Previously Tagged As Notable (Central Repository)", "# {0} - list of cases", - "CrDataArtifactIngestModule_notableJustification=Previously marked as notable in cases {0}" + "CentralRepoIngestModule_notableJustification=Previously marked as notable in cases {0}" }) private void makePrevNotableAnalysisResult(Content content, Set previousCases, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue) { String prevCases = previousCases.stream().collect(Collectors.joining(",")); - String justification = Bundle.CrDataArtifactIngestModule_notableJustification(prevCases); - Collection attributes = Arrays.asList(new BlackboardAttribute(TSK_SET_NAME, CentralRepoIngestModuleFactory.getModuleName(), Bundle.CrDataArtifactIngestModule_notableSetName()), + String justification = Bundle.CentralRepoIngestModule_notableJustification(prevCases); + Collection attributes = Arrays.asList(new BlackboardAttribute(TSK_SET_NAME, CentralRepoIngestModuleFactory.getModuleName(), Bundle.CentralRepoIngestModule_notableSetName()), new BlackboardAttribute(TSK_CORRELATION_TYPE, CentralRepoIngestModuleFactory.getModuleName(), corrAttrType.getDisplayName()), new BlackboardAttribute(TSK_CORRELATION_VALUE, CentralRepoIngestModuleFactory.getModuleName(), corrAttrValue), new BlackboardAttribute(TSK_OTHER_CASES, CentralRepoIngestModuleFactory.getModuleName(), prevCases)); @@ -385,17 +400,17 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo * @param corrAttrValue The value of the matched correlation attribute. 
*/ @NbBundle.Messages({ - "CrDataArtifactIngestModule_prevSeenSetName=Previously Seen (Central Repository)", + "CentralRepoIngestModule_prevSeenSetName=Previously Seen (Central Repository)", "# {0} - list of cases", - "CrDataArtifactIngestModule_prevSeenJustification=Previously seen in cases {0}" + "CentralRepoIngestModule_prevSeenJustification=Previously seen in cases {0}" }) private void makePrevSeenAnalysisResult(Content content, Set previousCases, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue) { Optional score = calculateScore(previousCases.size()); if (score.isPresent()) { String prevCases = previousCases.stream().collect(Collectors.joining(",")); - String justification = Bundle.CrDataArtifactIngestModule_prevSeenJustification(prevCases); + String justification = Bundle.CentralRepoIngestModule_prevSeenJustification(prevCases); Collection analysisResultAttributes = Arrays.asList( - new BlackboardAttribute(TSK_SET_NAME, CentralRepoIngestModuleFactory.getModuleName(), Bundle.CrDataArtifactIngestModule_prevSeenSetName()), + new BlackboardAttribute(TSK_SET_NAME, CentralRepoIngestModuleFactory.getModuleName(), Bundle.CentralRepoIngestModule_prevSeenSetName()), new BlackboardAttribute(TSK_CORRELATION_TYPE, CentralRepoIngestModuleFactory.getModuleName(), corrAttrType.getDisplayName()), new BlackboardAttribute(TSK_CORRELATION_VALUE, CentralRepoIngestModuleFactory.getModuleName(), corrAttrValue), new BlackboardAttribute(TSK_OTHER_CASES, CentralRepoIngestModuleFactory.getModuleName(), prevCases)); @@ -411,13 +426,13 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo * @param corrAttrValue The value of the new correlation attribute. 
*/ @NbBundle.Messages({ - "CrDataArtifactIngestModule_prevUnseenJustification=Previously seen in zero cases" + "CentralRepoIngestModule_prevUnseenJustification=Previously seen in zero cases" }) private void makePrevUnseenAnalysisResult(Content content, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue) { Collection attributesForNewArtifact = Arrays.asList( new BlackboardAttribute(TSK_CORRELATION_TYPE, CentralRepoIngestModuleFactory.getModuleName(), corrAttrType.getDisplayName()), new BlackboardAttribute(TSK_CORRELATION_VALUE, CentralRepoIngestModuleFactory.getModuleName(), corrAttrValue)); - makeAndPostAnalysisResult(content, BlackboardArtifact.Type.TSK_PREVIOUSLY_UNSEEN, attributesForNewArtifact, "", Score.SCORE_LIKELY_NOTABLE, Bundle.CrDataArtifactIngestModule_prevUnseenJustification()); + makeAndPostAnalysisResult(content, BlackboardArtifact.Type.TSK_PREVIOUSLY_UNSEEN, attributesForNewArtifact, "", Score.SCORE_LIKELY_NOTABLE, Bundle.CentralRepoIngestModule_prevUnseenJustification()); } /** @@ -437,8 +452,8 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo score = Score.SCORE_NONE; } return Optional.ofNullable(score); - } - + } + /** * Makes a new analysis result of a given type for a content and posts it to * the blackboard. 
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java index ab02f9d849..a6a8e54d62 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java @@ -32,11 +32,7 @@ import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeUtil; import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException; -import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoPlatforms; -import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoDbManager; -import org.sleuthkit.autopsy.core.RuntimeProperties; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; import org.sleuthkit.autopsy.healthmonitor.HealthMonitor; import org.sleuthkit.autopsy.healthmonitor.TimingMetric; import org.sleuthkit.autopsy.ingest.FileIngestModule; @@ -59,18 +55,18 @@ import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository; import org.sleuthkit.datamodel.Score; /** - * Ingest module for inserting entries into the Central Repository database on - * ingest of a data source + * A file ingest module that adds correlation attributes for files to the + * central repository and makes previously notable analysis results based on + * previous occurrences.
 */ @Messages({"CentralRepoIngestModule.prevTaggedSet.text=Previously Tagged As Notable (Central Repository)", "CentralRepoIngestModule.prevCaseComment.text=Previous Case: "}) final class CentralRepoFileIngestModule implements FileIngestModule { - private final static Logger logger = Logger.getLogger(CentralRepoFileIngestModule.class.getName()); + private static final Logger logger = Logger.getLogger(CentralRepoFileIngestModule.class.getName()); private static final String MODULE_NAME = CentralRepoIngestModuleFactory.getModuleName(); private final IngestServices services = IngestServices.getInstance(); private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter(); - private static final IngestModuleReferenceCounter warningMsgRefCounter = new IngestModuleReferenceCounter(); private long jobId; private CorrelationCase centralRepoCase; private CorrelationDataSource centralRepoDataSource; @@ -78,11 +74,14 @@ final class CentralRepoFileIngestModule implements FileIngestModule { private final boolean flagTaggedNotableItems; private Blackboard blackboard; private final boolean createCorrelationProperties; + private CentralRepository centralRepoDb; /** - * Instantiate the Central Repository ingest module. + * Constructs a file ingest module that adds correlation attributes for + * files to the central repository and makes previously notable analysis + * results based on previous occurrences. * - * @param settings The ingest settings for the module instance. + * @param settings The ingest job settings. */ CentralRepoFileIngestModule(IngestSettings settings) { flagTaggedNotableItems = settings.isFlagTaggedNotableItems(); @@ -91,22 +90,6 @@ final class CentralRepoFileIngestModule implements FileIngestModule { @Override public ProcessResult process(AbstractFile abstractFile) { - if (CentralRepository.isEnabled() == false) { - /* - * Not signaling an error for now.
This is a workaround for the way - * all newly didscovered ingest modules are automatically anabled. - * - * TODO (JIRA-2731): Add isEnabled API for ingest modules. - */ - return ProcessResult.OK; - } - - try { - blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard(); - } catch (NoCurrentCaseException ex) { - logger.log(Level.SEVERE, "Exception while getting open case.", ex); - return ProcessResult.ERROR; - } if (!CorrelationAttributeUtil.isSupportedAbstractFileType(abstractFile)) { return ProcessResult.OK; @@ -116,15 +99,6 @@ final class CentralRepoFileIngestModule implements FileIngestModule { return ProcessResult.OK; } - CentralRepository dbManager; - try { - dbManager = CentralRepository.getInstance(); - } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error connecting to Central Repository database.", ex); - return ProcessResult.ERROR; - } - - // only continue if we are correlating filesType if (!filesType.isEnabled()) { return ProcessResult.OK; } @@ -142,7 +116,7 @@ final class CentralRepoFileIngestModule implements FileIngestModule { if (abstractFile.getKnown() != TskData.FileKnown.KNOWN && flagTaggedNotableItems) { try { TimingMetric timingMetric = HealthMonitor.getTimingMetric("Central Repository: Notable artifact query"); - List caseDisplayNamesList = dbManager.getListCasesHavingArtifactInstancesKnownBad(filesType, md5); + List caseDisplayNamesList = centralRepoDb.getListCasesHavingArtifactInstancesKnownBad(filesType, md5); HealthMonitor.submitTimingMetric(timingMetric); if (!caseDisplayNamesList.isEmpty()) { postCorrelatedBadFileToBlackboard(abstractFile, caseDisplayNamesList, filesType, md5); @@ -169,7 +143,7 @@ final class CentralRepoFileIngestModule implements FileIngestModule { TskData.FileKnown.UNKNOWN // NOTE: Known status in the CR is based on tagging, not hashes like the Case Database. 
, abstractFile.getId()); - dbManager.addAttributeInstanceBulk(cefi); + centralRepoDb.addAttributeInstanceBulk(cefi); } catch (CentralRepoException ex) { logger.log(Level.SEVERE, "Error adding artifact to bulk artifacts.", ex); // NON-NLS return ProcessResult.ERROR; @@ -183,113 +157,68 @@ final class CentralRepoFileIngestModule implements FileIngestModule { @Override public void shutDown() { - if ((CentralRepository.isEnabled() == false) || (centralRepoCase == null) || (centralRepoDataSource == null)) { - return; + if (refCounter.decrementAndGet(jobId) == 0) { + try { + centralRepoDb.commitAttributeInstancesBulk(); + } catch (CentralRepoException ex) { + logger.log(Level.SEVERE, "Error committing bulk insert of correlation attributes", ex); // NON-NLS + } } - CentralRepository dbManager; - try { - dbManager = CentralRepository.getInstance(); - } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error connecting to Central Repository database.", ex); - return; - } - try { - dbManager.commitAttributeInstancesBulk(); - } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error doing bulk insert of artifacts.", ex); // NON-NLS - } - try { - Long count = dbManager.getCountArtifactInstancesByCaseDataSource(centralRepoDataSource); - logger.log(Level.INFO, "{0} artifacts in db for case: {1} ds:{2}", new Object[]{count, centralRepoCase.getDisplayName(), centralRepoDataSource.getName()}); // NON-NLS - } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error counting artifacts.", ex); // NON-NLS - } - - // TODO: once we implement shared cache, if refCounter is 1, then submit data in bulk. - refCounter.decrementAndGet(jobId); } - // see ArtifactManagerTimeTester for details @Messages({ - "CentralRepoIngestModule.notfyBubble.title=Central Repository Not Initialized", - "CentralRepoIngestModule.errorMessage.isNotEnabled=Central repository settings are not initialized, cannot run Central Repository ingest module." 
+ "CentralRepoIngestModule_missingFileCorrAttrTypeErrMsg=Correlation attribute type for files not found in the central repository", + "CentralRepoIngestModule_cannotGetCrCaseErrMsg=Case not present in the central repository", + "CentralRepoIngestModule_cannotGetCrDataSourceErrMsg=Data source not present in the central repository" }) @Override public void startUp(IngestJobContext context) throws IngestModuleException { - if (CentralRepository.isEnabled() == false) { - /* - * Not throwing the customary exception for now. This is a - * workaround for the way all newly didscovered ingest modules are - * automatically anabled. - * - * TODO (JIRA-2731): Add isEnabled API for ingest modules. - */ - if (RuntimeProperties.runningWithGUI()) { - if (1L == warningMsgRefCounter.incrementAndGet(jobId)) { - MessageNotifyUtil.Notify.warn(Bundle.CentralRepoIngestModule_notfyBubble_title(), Bundle.CentralRepoIngestModule_errorMessage_isNotEnabled()); - } - } - return; + jobId = context.getJobId(); + + /* + * IMPORTANT: Start up IngestModuleException messages are displayed to + * the user, if a user is present. Therefore, an exception to the policy + * that exception messages are not localized is appropriate here. Also, + * the exception messages should be user-friendly. 
+ */ + if (!CentralRepository.isEnabled()) { + throw new IngestModuleException(Bundle.CrDataArtifactIngestModule_crNotEnabledErrMsg()); } + Case autopsyCase; try { autopsyCase = Case.getCurrentCaseThrows(); } catch (NoCurrentCaseException ex) { - logger.log(Level.SEVERE, "Exception while getting open case.", ex); - throw new IngestModuleException("Exception while getting open case.", ex); + throw new IngestModuleException(Bundle.CrDataArtifactIngestModule_noCurrentCaseErrMsg(), ex); } - // Don't allow sqlite central repo databases to be used for multi user cases - if ((autopsyCase.getCaseType() == Case.CaseType.MULTI_USER_CASE) - && (CentralRepoDbManager.getSavedDbChoice().getDbPlatform() == CentralRepoPlatforms.SQLITE)) { - logger.log(Level.SEVERE, "Cannot run Central Repository ingest module on a multi-user case with a SQLite central repository."); - throw new IngestModuleException("Cannot run on a multi-user case with a SQLite central repository."); // NON-NLS - } - jobId = context.getJobId(); + blackboard = autopsyCase.getSleuthkitCase().getBlackboard(); - CentralRepository centralRepoDb; try { centralRepoDb = CentralRepository.getInstance(); } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error connecting to central repository database.", ex); // NON-NLS - throw new IngestModuleException("Error connecting to central repository database.", ex); // NON-NLS + throw new IngestModuleException(Bundle.CentralRepoIngestModule_crInaccessibleErrMsg(), ex); } try { filesType = centralRepoDb.getCorrelationTypeById(CorrelationAttributeInstance.FILES_TYPE_ID); } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error getting correlation type FILES in ingest module start up.", ex); // NON-NLS - throw new IngestModuleException("Error getting correlation type FILES in ingest module start up.", ex); // NON-NLS + throw new IngestModuleException(Bundle.CentralRepoIngestModule_missingFileCorrAttrTypeErrMsg(), ex); } try { centralRepoCase = 
centralRepoDb.getCase(autopsyCase); } catch (CentralRepoException ex) { - throw new IngestModuleException("Unable to get case from central repository database ", ex); + throw new IngestModuleException(Bundle.CentralRepoIngestModule_cannotGetCrCaseErrMsg(), ex); } try { centralRepoDataSource = CorrelationDataSource.fromTSKDataSource(centralRepoCase, context.getDataSource()); } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error getting data source info.", ex); // NON-NLS - throw new IngestModuleException("Error getting data source info.", ex); // NON-NLS + throw new IngestModuleException(Bundle.CentralRepoIngestModule_cannotGetCrDataSourceErrMsg(), ex); } - // TODO: once we implement a shared cache, load/init it here w/ syncronized and define reference counter - // if we are the first thread / module for this job, then make sure the case - // and image exist in the DB before we associate artifacts with it. - if (refCounter.incrementAndGet(jobId) - == 1) { - // ensure we have this data source in the EAM DB - try { - if (null == centralRepoDb.getDataSource(centralRepoCase, centralRepoDataSource.getDataSourceObjectID())) { - centralRepoDb.newDataSource(centralRepoDataSource); - } - } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error adding data source to Central Repository.", ex); // NON-NLS - throw new IngestModuleException("Error adding data source to Central Repository.", ex); // NON-NLS - } - } + refCounter.incrementAndGet(jobId); } /** @@ -298,19 +227,18 @@ final class CentralRepoFileIngestModule implements FileIngestModule { * @param abstractFile The file from which to create an artifact. * @param caseDisplayNames Case names to be added to a TSK_COMMON attribute. 
*/ - private void postCorrelatedBadFileToBlackboard(AbstractFile abstractFile, List caseDisplayNames, CorrelationAttributeInstance.Type aType, String value) { + private void postCorrelatedBadFileToBlackboard(AbstractFile abstractFile, List caseDisplayNames, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue) { String prevCases = caseDisplayNames.stream().distinct().collect(Collectors.joining(",")); String justification = "Previously marked as notable in cases " + prevCases; - Collection attributes = Arrays.asList( - new BlackboardAttribute( - TSK_SET_NAME, MODULE_NAME, - Bundle.CentralRepoIngestModule_prevTaggedSet_text()), + Collection attributes = Arrays.asList(new BlackboardAttribute( + TSK_SET_NAME, MODULE_NAME, + Bundle.CentralRepoIngestModule_prevTaggedSet_text()), new BlackboardAttribute( TSK_CORRELATION_TYPE, MODULE_NAME, - aType.getDisplayName()), + corrAttrType.getDisplayName()), new BlackboardAttribute( TSK_CORRELATION_VALUE, MODULE_NAME, - value), + corrAttrValue), new BlackboardAttribute( TSK_OTHER_CASES, MODULE_NAME, prevCases)); @@ -336,14 +264,6 @@ final class CentralRepoFileIngestModule implements FileIngestModule { } - @Messages({ - "CentralRepoIngestModule_notable_message_header=A file in this data source was previously seen and tagged as Notable.
", - "CentralRepoIngestModel_name_header=Name:
", - "CentralRepoIngestModel_previous_case_header=
Previous Cases:
", - "# {0} - Name of file that is Notable", - "CentralRepoIngestModule_postToBB_knownBadMsg=Notable: {0}" - }) - /** * Post a message to the ingest inbox alerting the user that a bad file was * found. @@ -353,6 +273,13 @@ final class CentralRepoFileIngestModule implements FileIngestModule { * @param md5Hash badFile's md5 hash * @param caseDisplayNames List of cases that the artifact appears in. */ + @Messages({ + "CentralRepoIngestModule_notable_message_header=A file in this data source was previously seen and tagged as Notable.
", + "CentralRepoIngestModel_name_header=Name:
", + "CentralRepoIngestModel_previous_case_header=
Previous Cases:
", + "# {0} - Name of file that is Notable", + "CentralRepoIngestModule_postToBB_knownBadMsg=Notable: {0}" + }) private void sendBadFileInboxMessage(BlackboardArtifact artifact, String name, String md5Hash, List caseDisplayNames) { StringBuilder detailsSb = new StringBuilder(1024); @@ -368,4 +295,5 @@ final class CentralRepoFileIngestModule implements FileIngestModule { name + md5Hash, artifact)); } + } From 4c453fed9c904fdbcb3214d92f4e2b41f88c8b5a Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Fri, 5 Nov 2021 11:10:51 -0400 Subject: [PATCH 013/142] 7895 CR data artifact ingest module --- .../autopsy/actions/Bundle.properties-MERGED | 7 - .../casemodule/Bundle.properties-MERGED | 27 +- .../Bundle.properties-MERGED | 5 +- .../contentviewer/Bundle.properties-MERGED | 3 - .../datamodel/CorrelationAttributeUtil.java | 14 +- .../datamodel/CorrelationDataSource.java | 7 +- .../CentralRepoDataArtifactIngestModule.java | 17 +- .../contentviewers/Bundle.properties-MERGED | 1337 ++++++++--------- .../autopsy/core/Bundle.properties-MERGED | 8 +- .../corecomponents/Bundle.properties-MERGED | 6 +- .../coreutils/Bundle.properties-MERGED | 4 +- .../datamodel/Bundle.properties-MERGED | 8 +- .../filesearch/Bundle.properties-MERGED | 4 +- .../autopsy/ingest/Bundle.properties-MERGED | 2 +- .../Bundle.properties-MERGED | 7 +- .../fileextmismatch/Bundle.properties-MERGED | 18 +- .../hashdatabase/Bundle.properties-MERGED | 10 +- .../interestingitems/Bundle.properties-MERGED | 5 +- .../photoreccarver/Bundle.properties-MERGED | 2 +- .../modules/html/Bundle.properties-MERGED | 6 +- .../configuration/Bundle.properties-MERGED | 4 + .../netbeans/core/startup/Bundle.properties | 2 +- .../core/windows/view/ui/Bundle.properties | 2 +- 23 files changed, 701 insertions(+), 804 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/actions/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/actions/Bundle.properties-MERGED index 5c9a0ea3ac..b20993c416 100755 --- 
a/Core/src/org/sleuthkit/autopsy/actions/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/actions/Bundle.properties-MERGED @@ -6,19 +6,14 @@ AddBlackboardArtifactTagAction.unableToTag.msg=Unable to tag {0}. AddContentTagAction.cannotApplyTagErr=Cannot Apply Tag AddContentTagAction.pluralTagFile=Add File Tags AddContentTagAction.singularTagFile=Add File Tag -# {0} - fileName -# {1} - tagName AddContentTagAction.tagExists={0} has been tagged as {1}. Cannot reapply the same tag. AddContentTagAction.taggingErr=Tagging Error -# {0} - fileName AddContentTagAction.unableToTag.msg=Unable to tag {0}, not a regular file. -# {0} - fileName AddContentTagAction.unableToTag.msg2=Unable to tag {0}. CTL_DumpThreadAction=Thread Dump CTL_ShowIngestProgressSnapshotAction=Ingest Status Details DeleteBlackboardArtifactTagAction.deleteTag=Remove Selected Tag(s) DeleteBlackboardArtifactTagAction.tagDelErr=Tag Deletion Error -# {0} - tagName DeleteBlackboardArtifactTagAction.unableToDelTag.msg=Unable to delete tag {0}. DeleteContentTagAction.deleteTag=Remove Selected Tag(s) DeleteContentTagAction.tagDelErr=Tag Deletion Error @@ -84,8 +79,6 @@ CTL_OpenOutputFolder=Open Case Folder OpenOutputFolder.error1=Case Folder Not Found: {0} OpenOutputFolder.noCaseOpen=No open case, therefore no current case folder available. OpenOutputFolder.CouldNotOpenOutputFolder=Could not open case folder -# {0} - old tag name -# {1} - artifactID ReplaceBlackboardArtifactTagAction.replaceTag.alert=Unable to replace tag {0} for artifact {1}. 
# {0} - old tag name # {1} - content obj id diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/casemodule/Bundle.properties-MERGED index 528d3a5088..9a43ffe229 100755 --- a/Core/src/org/sleuthkit/autopsy/casemodule/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/casemodule/Bundle.properties-MERGED @@ -247,15 +247,10 @@ AddImageWizardIngestConfigPanel.dsProcDone.errs.text=*Errors encountered in addi AddImageWizardIngestConfigVisual.getName.text=Configure Ingest AddImageWizardIterator.stepXofN=Step {0} of {1} AddLocalFilesTask.localFileAdd.progress.text=Adding: {0}/{1} -Case.getCurCase.exception.noneOpen=Cannot get the current case; there is no case open\! +Case.getCurCase.exception.noneOpen=Cannot get the current case; there is no case open! Case.open.msgDlg.updated.msg=Updated case database schema.\nA backup copy of the database with the following path has been made:\n {0} Case.open.msgDlg.updated.title=Case Database Schema Update -Case.checkImgExist.confDlg.doesntExist.msg=One of the images associated with \n\ -this case are missing. Would you like to search for them now?\n\ -Previously, the image was located at:\n\ -{0}\n\ -Please note that you will still be able to browse directories and generate reports\n\ -if you choose No, but you will not be able to view file content or run the ingest process. +Case.checkImgExist.confDlg.doesntExist.msg=One of the images associated with \nthis case are missing. Would you like to search for them now?\nPreviously, the image was located at:\n{0}\nPlease note that you will still be able to browse directories and generate reports\nif you choose No, but you will not be able to view file content or run the ingest process. Case.checkImgExist.confDlg.doesntExist.title=Missing Image Case.addImg.exception.msg=Error adding image to the case Case.updateCaseName.exception.msg=Error while trying to update the case name. 
@@ -274,12 +269,9 @@ Case.GetCaseTypeGivenPath.Failure=Unable to get case type Case.metaDataFileCorrupt.exception.msg=The case metadata file (.aut) is corrupted. Case.deleteReports.deleteFromDiskException.log.msg=Unable to delete the report from the disk. Case.deleteReports.deleteFromDiskException.msg=Unable to delete the report {0} from the disk.\nYou may manually delete it from {1} -CaseDeleteAction.closeConfMsg.text=Are you sure want to close and delete this case? \n\ - Case Name: {0}\n\ - Case Directory: {1} +CaseDeleteAction.closeConfMsg.text=Are you sure want to close and delete this case? \nCase Name: {0}\nCase Directory: {1} CaseDeleteAction.closeConfMsg.title=Warning: Closing the Current Case -CaseDeleteAction.msgDlg.fileInUse.msg=The delete action cannot be fully completed because the folder or file in it is open by another program.\n\n\ -Close the folder and file and try again or you can delete the case manually. +CaseDeleteAction.msgDlg.fileInUse.msg=The delete action cannot be fully completed because the folder or file in it is open by another program.\n\nClose the folder and file and try again or you can delete the case manually. CaseDeleteAction.msgDlg.fileInUse.title=Error: Folder In Use CaseDeleteAction.msgDlg.caseDelete.msg=Case {0} has been deleted. CaseOpenAction.autFilter.title={0} Case File ( {1}) @@ -311,8 +303,7 @@ NewCaseWizardAction.databaseProblem1.text=Cannot open database. Cancelling case NewCaseWizardAction.databaseProblem2.text=Error NewCaseWizardPanel1.validate.errMsg.invalidSymbols=The Case Name cannot contain any of the following symbols: \\ / : * ? " < > | NewCaseWizardPanel1.validate.errMsg.dirExists=Case directory ''{0}'' already exists. -NewCaseWizardPanel1.validate.confMsg.createDir.msg=The base directory "{0}" does not exist. \n\n\ - Do you want to create that directory? +NewCaseWizardPanel1.validate.confMsg.createDir.msg=The base directory "{0}" does not exist. \n\nDo you want to create that directory? 
NewCaseWizardPanel1.validate.confMsg.createDir.title=Create directory NewCaseWizardPanel1.validate.errMsg.cantCreateParDir.msg=Error: Could not create case parent directory {0} NewCaseWizardPanel1.validate.errMsg.prevCreateBaseDir.msg=Prevented from creating base directory {0} @@ -341,7 +332,6 @@ OptionalCasePropertiesPanel.lbPointOfContactPhoneLabel.text=Phone: OptionalCasePropertiesPanel.orgainizationPanel.border.title=Organization RecentCases.exception.caseIdxOutOfRange.msg=Recent case index {0} is out of range. RecentCases.getName.text=Clear Recent Cases -# {0} - case name RecentItems.openRecentCase.msgDlg.text=Case {0} no longer exists. SelectDataSourceProcessorPanel.name.text=Select Data Source Type StartupWindow.title.text=Welcome @@ -354,7 +344,6 @@ StartupWindowProvider.openCase.noFile=Unable to open previously open case becaus UnpackagePortableCaseDialog.title.text=Unpackage Portable Case UnpackagePortableCaseDialog.UnpackagePortableCaseDialog.extensions=Portable case package (.zip, .zip.001) UnpackagePortableCaseDialog.validatePaths.badExtension=File extension must be .zip or .zip.001 -# {0} - case folder UnpackagePortableCaseDialog.validatePaths.caseFolderExists=Folder {0} already exists UnpackagePortableCaseDialog.validatePaths.caseIsNotFile=Selected path is not a file UnpackagePortableCaseDialog.validatePaths.caseNotFound=File does not exist @@ -369,8 +358,8 @@ UnpackageWorker.doInBackground.previouslySeenCase=Case has been previously opene UpdateRecentCases.menuItem.clearRecentCases.text=Clear Recent Cases UpdateRecentCases.menuItem.empty=-Empty- AddImageWizardIngestConfigPanel.CANCEL_BUTTON.text=Cancel -NewCaseVisualPanel1.CaseFolderOnCDriveError.text=Warning: Path to multi-user case folder is on \"C:\" drive -NewCaseVisualPanel1.CaseFolderOnInternalDriveWindowsError.text=Warning: Path to case folder is on \"C:\" drive. 
Case folder is created on the target system +NewCaseVisualPanel1.CaseFolderOnCDriveError.text=Warning: Path to multi-user case folder is on "C:" drive +NewCaseVisualPanel1.CaseFolderOnInternalDriveWindowsError.text=Warning: Path to case folder is on "C:" drive. Case folder is created on the target system NewCaseVisualPanel1.CaseFolderOnInternalDriveLinuxError.text=Warning: Path to case folder is on the target system. Create case folder in mounted drive. NewCaseVisualPanel1.uncPath.error=Error: UNC paths are not allowed for Single-User cases CollaborationMonitor.addingDataSourceStatus.msg={0} adding data source @@ -378,7 +367,7 @@ CollaborationMonitor.analyzingDataSourceStatus.msg={0} analyzing {1} MissingImageDialog.lbWarning.text= MissingImageDialog.lbWarning.toolTipText= NewCaseVisualPanel1.caseParentDirWarningLabel.text= -NewCaseVisualPanel1.multiUserCaseRadioButton.text=Multi-User +NewCaseVisualPanel1.multiUserCaseRadioButton.text=Multi-User\t\t NewCaseVisualPanel1.singleUserCaseRadioButton.text=Single-User NewCaseVisualPanel1.caseTypeLabel.text=Case Type: SingleUserCaseConverter.BadDatabaseFileName=Database file does not exist! diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/centralrepository/Bundle.properties-MERGED index b4f7f835ef..b2320b5408 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/Bundle.properties-MERGED @@ -5,10 +5,7 @@ CentralRepoCommentDialog.title.addEditCentralRepoComment=Add/Edit Central Reposi OpenIDE-Module-Name=Central Repository OpenIDE-Module-Display-Category=Ingest Module OpenIDE-Module-Short-Description=Central Repository Ingest Module -OpenIDE-Module-Long-Description=\ - Central Repository ingest module and central database. 
\n\n\ - The Central Repository ingest module stores attributes of artifacts matching selected correlation types into a central database.\n\ - Stored attributes are used in future cases to correlate and analyzes files and artifacts during ingest. +OpenIDE-Module-Long-Description=Central Repository ingest module and central database. \n\nThe Central Repository ingest module stores attributes of artifacts matching selected correlation types into a central database.\nStored attributes are used in future cases to correlate and analyzes files and artifacts during ingest. CentralRepoCommentDialog.commentLabel.text=Comment: CentralRepoCommentDialog.okButton.text=&OK CentralRepoCommentDialog.cancelButton.text=C&ancel diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/Bundle.properties-MERGED index a97cc319da..cccefccf80 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/Bundle.properties-MERGED @@ -24,9 +24,6 @@ OtherOccurrencesPanel.caseDetailsDialog.noCaseNameError=Error OtherOccurrencesPanel.caseDetailsDialog.noDetails=No details for this case. OtherOccurrencesPanel.caseDetailsDialog.noDetailsReference=No case details for Global reference properties. OtherOccurrencesPanel.caseDetailsDialog.notSelected=No Row Selected -# {0} - commonality percentage -# {1} - correlation type -# {2} - correlation value OtherOccurrencesPanel.correlatedArtifacts.byType={0}% of data sources have {2} (type: {1})\n OtherOccurrencesPanel.correlatedArtifacts.failed=Failed to get frequency details. OtherOccurrencesPanel.correlatedArtifacts.isEmpty=There are no files or artifacts to correlate. 
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeUtil.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeUtil.java index 1c5a5feb9a..06054a31c5 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeUtil.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeUtil.java @@ -115,19 +115,20 @@ public class CorrelationAttributeUtil { } /** - * Gets the correlation attributes for an OS account. + * Gets the correlation attributes for an OS account instance represented as + * an OS account plus a data source. * - * @param account The OS account. + * @param account The OS account. + * @param dataSource The data source. * * @return The correlation attributes. */ - public static List makeCorrAttrsToSave(OsAccount account) { + public static List makeCorrAttrsToSave(OsAccount account, Content dataSource) { List correlationAttrs = new ArrayList<>(); if (CentralRepository.isEnabled()) { Optional accountAddr = account.getAddr(); if (accountAddr.isPresent() && !isSystemOsAccount(accountAddr.get())) { try { - Content dataSource = account.getDataSource(); CorrelationCase correlationCase = CentralRepository.getInstance().getCase(Case.getCurrentCaseThrows()); CorrelationAttributeInstance correlationAttributeInstance = new CorrelationAttributeInstance( CentralRepository.getInstance().getCorrelationTypeById(CorrelationAttributeInstance.OSACCOUNT_TYPE_ID), @@ -139,8 +140,6 @@ public class CorrelationAttributeUtil { TskData.FileKnown.KNOWN, account.getId()); correlationAttrs.add(correlationAttributeInstance); - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, String.format("Error getting data source for OS account '%s'", accountAddr.get()), ex); //NON-NLS } catch (CentralRepoException ex) { logger.log(Level.SEVERE, String.format("Error querying central repository for OS account '%s'", accountAddr.get()), ex); //NON-NLS } catch 
(NoCurrentCaseException ex) { @@ -832,8 +831,7 @@ public class CorrelationAttributeUtil { List correlationAttrs = new ArrayList<>(); if (CentralRepository.isEnabled() && osAccountInst != null) { try { - OsAccount osAccount = osAccountInst.getOsAccount(); - correlationAttrs.addAll(makeCorrAttrsToSave(osAccount)); + correlationAttrs.addAll(makeCorrAttrsToSave(osAccountInst.getOsAccount(), osAccountInst.getDataSource())); } catch (TskCoreException ex) { logger.log(Level.SEVERE, String.format("Error getting OS account from OS account instance '%s'", osAccountInst), ex); } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationDataSource.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationDataSource.java index 9bbccb3317..cc8f739892 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationDataSource.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationDataSource.java @@ -106,7 +106,7 @@ public class CorrelationDataSource implements Serializable { * data aosurce. * @param dataSource The case database data source. * - * @return The cnetral repository data source. + * @return The central repository data source. 
* * @throws CentralRepoException This exception is thrown if there is an * error creating the central repository data @@ -114,7 +114,7 @@ public class CorrelationDataSource implements Serializable { */ public static CorrelationDataSource fromTSKDataSource(CorrelationCase correlationCase, Content dataSource) throws CentralRepoException { if (!CentralRepository.isEnabled()) { - throw new CentralRepoException("Central repository is not enabled, cannot create central repository data source, "); + throw new CentralRepoException(String.format("Central repository is not enabled, cannot create central repository data source for '%s'", dataSource)); } Case curCase; @@ -142,11 +142,10 @@ public class CorrelationDataSource implements Serializable { } catch (TskDataException | TskCoreException ex) { throw new CentralRepoException("Error getting data source info from case database", ex); } - correlationDataSource = new CorrelationDataSource(correlationCase, deviceId, dataSource.getName(), dataSource.getId(), md5, sha1, sha256); correlationDataSource = CentralRepository.getInstance().newDataSource(correlationDataSource); } - + return correlationDataSource; } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java index f1610659f9..4b111d95f2 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java @@ -209,7 +209,7 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo try { List osAccounts = osAccountMgr.getOsAccountsByDataSourceObjId(dataSource.getId()); for (OsAccount osAccount : osAccounts) { - process(osAccount); + process(osAccount, dataSource); } } catch (TskCoreException ex) { LOGGER.log(Level.SEVERE, 
String.format("Error getting OS accounts for data source %s (job ID=%d)", dataSource, ingestJobId), ex); @@ -217,15 +217,16 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo } /** - * Translates the attributes of a OS account into central repository - * correlation attributes and uses them to create analysis results and new - * central repository correlation attribute instances, depending on ingest - * job settings. + * Translates the attributes of an OS account and its data source (an OS + * account instance) into central repository correlation attributes and uses + * them to create analysis results and new central repository correlation + * attribute instances, depending on ingest job settings. * - * @param osAccount The OS account. + * @param osAccount The OS account. + * @param dataSource The data source. */ - private void process(OsAccount osAccount) { - List corrAttrs = CorrelationAttributeUtil.makeCorrAttrsToSave(osAccount); + private void process(OsAccount osAccount, Content dataSource) { + List corrAttrs = CorrelationAttributeUtil.makeCorrAttrsToSave(osAccount, dataSource); for (CorrelationAttributeInstance corrAttr : corrAttrs) { if (!corrAttrsAlreadyCreated.add(corrAttr.toString())) { /* diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/contentviewers/Bundle.properties-MERGED index be12157474..925e77c27c 100644 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/Bundle.properties-MERGED @@ -180,837 +180,776 @@ TranslatablePanel.comboBoxOption.translatedText=Translated Text # {0} - exception message TranslatablePanel.onSetContentError.text=There was an error displaying the text: {0} ## Window toolbar Title -viewer.window.title.default = ICEpdf Viewer -viewer.window.title.open.default = ICEpdf Viewer - [{0}]
+viewer.window.title.open.default=ICEpdf Viewer - [{0}] #status bar -viewer.statusbar.currentPage = Page {0} / {1} +viewer.statusbar.currentPage=Page {0} / {1} -viewer.common.number.one = 1 -viewer.common.number.two = 2 -viewer.common.number.three = 3 -viewer.common.number.four = 4 -viewer.common.number.five = 5 -viewer.common.number.six = 6 -viewer.common.number.seven = 7 -viewer.common.number.eight = 8 -viewer.common.number.nine = 9 -viewer.common.number.ten = 10 -viewer.common.number.eleven = 11 -viewer.common.number.twelve = 12 -viewer.common.number.thirteen = 13 -viewer.common.number.fourteen = 14 -viewer.common.number.fifteen = 15 -viewer.common.number.sixteen = 16 -viewer.common.number.seventeen = 17 -viewer.common.number.eighteen = 18 -viewer.common.number.nineteen = 19 -viewer.common.number.twenty = 20 -viewer.common.number.twentyOne = 21 -viewer.common.number.twentyTwo = 22 -viewer.common.number.twentyThree = 23 -viewer.common.number.twentyFour = 24 -viewer.common.number.twentyFive = 25 -viewer.common.number.twentySix = 26 -viewer.common.number.twentySeven = 27 -viewer.common.number.thirtySix = 36 -viewer.common.number.fortyEight = 48 +viewer.common.number.one=1 +viewer.common.number.two=2 +viewer.common.number.three=3 +viewer.common.number.four=4 +viewer.common.number.five=5 +viewer.common.number.six=6 +viewer.common.number.seven=7 +viewer.common.number.eight=8 +viewer.common.number.nine=9 +viewer.common.number.ten=10 +viewer.common.number.eleven=11 +viewer.common.number.twelve=12 +viewer.common.number.thirteen=13 +viewer.common.number.fourteen=14 +viewer.common.number.fifteen=15 +viewer.common.number.sixteen=16 +viewer.common.number.seventeen=17 +viewer.common.number.eighteen=18 +viewer.common.number.nineteen=19 +viewer.common.number.twenty=20 +viewer.common.number.twentyOne=21 +viewer.common.number.twentyTwo=22 +viewer.common.number.twentyThree=23 +viewer.common.number.twentyFour=24 +viewer.common.number.twentyFive=25 +viewer.common.number.twentySix=26 
+viewer.common.number.twentySeven=27 +viewer.common.number.thirtySix=36 +viewer.common.number.fortyEight=48 ## Top Page Control Toolbar -viewer.toolbar.hideToolBar.label = Hide Toolbar -viewer.toolbar.showToolBar.label = Show Toolbar -viewer.toolbar.showUtilityPane.label = Show Utility Pane -viewer.toolbar.hideUtilityPane.label = Hide Utility Pane -viewer.toolbar.open.label = -viewer.toolbar.open.tooltip = Open Document -viewer.toolbar.saveAs.label = Save As -viewer.toolbar.saveAs.tooltip = Save As... -viewer.toolbar.print.label = Print -viewer.toolbar.print.tooltip = Print Document -viewer.toolbar.search.label = Search -viewer.toolbar.search.tooltip = Search Document -viewer.toolbar.utilityPane.label = Utility Pane -viewer.toolbar.utilityPane.tooltip = Show/Hide Utility Pane -viewer.toolbar.navigation.label = -viewer.toolbar.navigation.pages.tooltip = Number of Pages -viewer.toolbar.navigation.pages.firstPage.label = -viewer.toolbar.navigation.current.tooltip = Current Page Number -viewer.toolbar.navigation.current.firstPage.label = -viewer.toolbar.navigation.firstPage.label = -viewer.toolbar.navigation.firstPage.tooltip = First Page -viewer.toolbar.navigation.previousPage.label = -viewer.toolbar.navigation.previousPage.tooltip = Previous Page -viewer.toolbar.navigation.nextPage.label = -viewer.toolbar.navigation.nextPage.tooltip = Next Page -viewer.toolbar.navigation.lastPage.label = -viewer.toolbar.navigation.lastPage.tooltip = Last Page -viewer.toolbar.pageIndicator = of {0} -viewer.toolbar.zoom.label = -viewer.toolbar.zoom.tooltip = Zoom -viewer.toolbar.zoom.out.label = -viewer.toolbar.zoom.out.tooltip = Zoom Out -viewer.toolbar.zoom.in.label = -viewer.toolbar.zoom.in.tooltip = Zoom In -viewer.toolbar.pageFit.actualsize.label = -viewer.toolbar.pageFit.actualsize.tooltip = Actual Size -viewer.toolbar.pageFit.fitWindow.label = -viewer.toolbar.pageFit.fitWindow.tooltip = Fit in Window -viewer.toolbar.pageFit.fitWidth.label = 
-viewer.toolbar.pageFit.fitWidth.tooltip = Fit Width -viewer.toolbar.rotation.left.label = -viewer.toolbar.rotation.left.tooltip = Rotate Left -viewer.toolbar.rotation.right.label = -viewer.toolbar.rotation.right.tooltip = Rotate Right -viewer.toolbar.tool.pan.label = -viewer.toolbar.tool.pan.tooltip = Pan Tool -viewer.toolbar.tool.text.label = -viewer.toolbar.tool.text.tooltip = Text Select Tool -viewer.toolbar.tool.select.label = -viewer.toolbar.tool.select.tooltip = Select Tool -viewer.toolbar.tool.link.label = -viewer.toolbar.tool.link.tooltip = Link Annotation Tool -viewer.toolbar.tool.highlight.label = Highlight -viewer.toolbar.tool.highlight.tooltip = Highlight Annotation Tool -viewer.toolbar.tool.strikeOut.label = Strike Out -viewer.toolbar.tool.strikeOut.tooltip = Strike Out Annotation Tool -viewer.toolbar.tool.underline.label = Underline -viewer.toolbar.tool.underline.tooltip = Underline Annotation Tool -viewer.toolbar.tool.line.label = Line -viewer.toolbar.tool.line.tooltip = Line Annotation Tool -viewer.toolbar.tool.lineArrow.label = Line Arrow -viewer.toolbar.tool.lineArrow.tooltip = Line Arrow Annotation Tool -viewer.toolbar.tool.rectangle.label = Rectangle -viewer.toolbar.tool.rectangle.tooltip = Rectangle Annotation Tool -viewer.toolbar.tool.circle.label = Circle -viewer.toolbar.tool.circle.tooltip = Circle Annotation Tool -viewer.toolbar.tool.ink.label = Ink -viewer.toolbar.tool.ink.tooltip = Ink Annotation Tool -viewer.toolbar.tool.freeText.label = Free Text -viewer.toolbar.tool.freeText.tooltip = Free Text Annotation Tool -viewer.toolbar.tool.textAnno.label = Text Annotation -viewer.toolbar.tool.textAnno.tooltip = Text Annotation Tool -viewer.toolbar.tool.plolyLine.label = Poly Line -viewer.toolbar.tool.plolyLine.tooltip = Poly Line Annotation Tool -viewer.toolbar.tool.zoomIn.label = -viewer.toolbar.tool.zoomIn.tooltip = Zoom In Tool -viewer.toolbar.tool.zoomMarquis.label = -viewer.toolbar.tool.zoomMarquis.tooltip = Zoom Marquee Tool 
-viewer.toolbar.tool.zoomDynamic.label = -viewer.toolbar.tool.zoomDynamic.tooltip = Zoom Dynamic Tool -viewer.toolbar.tool.zoomOut.label = -viewer.toolbar.tool.zoomOut.tooltip = Zoom Out Tool -viewer.toolbar.pageFit.fontEngine.label = -viewer.toolbar.pageFit.fontEngine.tooltip = Enable/Disable Font Engine -viewer.toolbar.tool.forms.highlight.label = Highlight Forms -viewer.toolbar.tool.forms.highlight.tooltip = Show/Hide Form Highlighting +viewer.toolbar.hideToolBar.label=Hide Toolbar +viewer.toolbar.showToolBar.label=Show Toolbar +viewer.toolbar.showUtilityPane.label=Show Utility Pane +viewer.toolbar.hideUtilityPane.label=Hide Utility Pane +viewer.toolbar.open.label= +viewer.toolbar.open.tooltip=Open Document +viewer.toolbar.saveAs.label=Save As +viewer.toolbar.saveAs.tooltip=Save As... +viewer.toolbar.print.label=Print +viewer.toolbar.print.tooltip=Print Document +viewer.toolbar.search.label=Search +viewer.toolbar.search.tooltip=Search Document +viewer.toolbar.utilityPane.label=Utility Pane +viewer.toolbar.utilityPane.tooltip=Show/Hide Utility Pane +viewer.toolbar.navigation.label= +viewer.toolbar.navigation.pages.tooltip=Number of Pages +viewer.toolbar.navigation.pages.firstPage.label= +viewer.toolbar.navigation.current.tooltip=Current Page Number +viewer.toolbar.navigation.current.firstPage.label= +viewer.toolbar.navigation.firstPage.label= +viewer.toolbar.navigation.firstPage.tooltip=First Page +viewer.toolbar.navigation.previousPage.label= +viewer.toolbar.navigation.previousPage.tooltip=Previous Page +viewer.toolbar.navigation.nextPage.label= +viewer.toolbar.navigation.nextPage.tooltip=Next Page +viewer.toolbar.navigation.lastPage.label= +viewer.toolbar.navigation.lastPage.tooltip=Last Page +viewer.toolbar.pageIndicator=of {0} +viewer.toolbar.zoom.label= +viewer.toolbar.zoom.tooltip=Zoom +viewer.toolbar.zoom.out.label= +viewer.toolbar.zoom.out.tooltip=Zoom Out +viewer.toolbar.zoom.in.label= +viewer.toolbar.zoom.in.tooltip=Zoom In 
+viewer.toolbar.pageFit.actualsize.label= +viewer.toolbar.pageFit.actualsize.tooltip=Actual Size +viewer.toolbar.pageFit.fitWindow.label= +viewer.toolbar.pageFit.fitWindow.tooltip=Fit in Window +viewer.toolbar.pageFit.fitWidth.label= +viewer.toolbar.pageFit.fitWidth.tooltip=Fit Width +viewer.toolbar.rotation.left.label= +viewer.toolbar.rotation.left.tooltip=Rotate Left +viewer.toolbar.rotation.right.label= +viewer.toolbar.rotation.right.tooltip=Rotate Right +viewer.toolbar.tool.pan.label= +viewer.toolbar.tool.pan.tooltip=Pan Tool +viewer.toolbar.tool.text.label= +viewer.toolbar.tool.text.tooltip=Text Select Tool +viewer.toolbar.tool.select.label= +viewer.toolbar.tool.select.tooltip=Select Tool +viewer.toolbar.tool.link.label= +viewer.toolbar.tool.link.tooltip=Link Annotation Tool +viewer.toolbar.tool.highlight.label=Highlight +viewer.toolbar.tool.highlight.tooltip=Highlight Annotation Tool +viewer.toolbar.tool.strikeOut.label=Strike Out +viewer.toolbar.tool.strikeOut.tooltip=Strike Out Annotation Tool +viewer.toolbar.tool.underline.label=Underline +viewer.toolbar.tool.underline.tooltip=Underline Annotation Tool +viewer.toolbar.tool.line.label=Line +viewer.toolbar.tool.line.tooltip=Line Annotation Tool +viewer.toolbar.tool.lineArrow.label=Line Arrow +viewer.toolbar.tool.lineArrow.tooltip=Line Arrow Annotation Tool +viewer.toolbar.tool.rectangle.label=Rectangle +viewer.toolbar.tool.rectangle.tooltip=Rectangle Annotation Tool +viewer.toolbar.tool.circle.label=Circle +viewer.toolbar.tool.circle.tooltip=Circle Annotation Tool +viewer.toolbar.tool.ink.label=Ink +viewer.toolbar.tool.ink.tooltip=Ink Annotation Tool +viewer.toolbar.tool.freeText.label=Free Text +viewer.toolbar.tool.freeText.tooltip=Free Text Annotation Tool +viewer.toolbar.tool.textAnno.label=Text Annotation +viewer.toolbar.tool.textAnno.tooltip=Text Annotation Tool +viewer.toolbar.tool.plolyLine.label=Poly Line +viewer.toolbar.tool.plolyLine.tooltip=Poly Line Annotation Tool 
+viewer.toolbar.tool.zoomIn.label= +viewer.toolbar.tool.zoomIn.tooltip=Zoom In Tool +viewer.toolbar.tool.zoomMarquis.label= +viewer.toolbar.tool.zoomMarquis.tooltip=Zoom Marquee Tool +viewer.toolbar.tool.zoomDynamic.label= +viewer.toolbar.tool.zoomDynamic.tooltip=Zoom Dynamic Tool +viewer.toolbar.tool.zoomOut.label= +viewer.toolbar.tool.zoomOut.tooltip=Zoom Out Tool +viewer.toolbar.pageFit.fontEngine.label= +viewer.toolbar.pageFit.fontEngine.tooltip=Enable/Disable Font Engine +viewer.toolbar.tool.forms.highlight.label=Highlight Forms +viewer.toolbar.tool.forms.highlight.tooltip=Show/Hide Form Highlighting ## Bottom Page View Control Toolbar -viewer.toolbar.pageView.nonContinuous.singlePage.label = -viewer.toolbar.pageView.nonContinuous.singlePage.tooltip = Single Page View Non-Continuous -viewer.toolbar.pageView.nonContinuous.facingPage.label = -viewer.toolbar.pageView.nonContinuous.facingPage.tooltip = Facing Page View Non-Continuous -viewer.toolbar.pageView.continuous.singlePage.label = -viewer.toolbar.pageView.continuous.singlePage.tooltip = Single Page View Continuous -viewer.toolbar.pageView.continuous.facingPage.label = -viewer.toolbar.pageView.continuous.facingPage.tooltip = Facing Page View Continuous +viewer.toolbar.pageView.nonContinuous.singlePage.label= +viewer.toolbar.pageView.nonContinuous.singlePage.tooltip=Single Page View Non-Continuous +viewer.toolbar.pageView.nonContinuous.facingPage.label= +viewer.toolbar.pageView.nonContinuous.facingPage.tooltip=Facing Page View Non-Continuous +viewer.toolbar.pageView.continuous.singlePage.label= +viewer.toolbar.pageView.continuous.singlePage.tooltip=Single Page View Continuous +viewer.toolbar.pageView.continuous.facingPage.label= +viewer.toolbar.pageView.continuous.facingPage.tooltip=Facing Page View Continuous ## File Menu and submenu items -viewer.menu.file.label = File -viewer.menu.file.mnemonic = F -viewer.menu.open.label = Open -viewer.menu.open.file.label = File... -viewer.menu.open.URL.label = URL... 
-viewer.menu.close.label = Close -viewer.menu.saveAs.label = Save As... -viewer.menu.exportText.label = Export Text... -viewer.menu.exportSVG.label = Export SVG... -viewer.menu.documentPermission.label = Document Permissions... -viewer.menu.documentInformation.label = Document Information... -viewer.menu.documentFonts.label = Document Fonts... -viewer.menu.printSetup.label = Print Setup... -viewer.menu.print.label = Print... -viewer.menu.exit.label = Exit +viewer.menu.file.label=File +viewer.menu.file.mnemonic=F +viewer.menu.open.label=Open +viewer.menu.open.file.label=File... +viewer.menu.open.URL.label=URL... +viewer.menu.close.label=Close +viewer.menu.saveAs.label=Save As... +viewer.menu.exportText.label=Export Text... +viewer.menu.exportSVG.label=Export SVG... +viewer.menu.documentPermission.label=Document Permissions... +viewer.menu.documentInformation.label=Document Information... +viewer.menu.documentFonts.label=Document Fonts... +viewer.menu.printSetup.label=Print Setup... +viewer.menu.print.label=Print... 
+viewer.menu.exit.label=Exit ## View Menu and submenu items -viewer.menu.edit.label = Edit -viewer.menu.edit.mnemonic = E -viewer.menu.edit.undo.label = Undo -viewer.menu.edit.redo.label = Redo -viewer.menu.edit.copy.label = Copy -viewer.menu.edit.delete.label = Delete -viewer.menu.edit.selectAll.label = Select All -viewer.menu.edit.deselectAll.label = Deselect All +viewer.menu.edit.label=Edit +viewer.menu.edit.mnemonic=E +viewer.menu.edit.undo.label=Undo +viewer.menu.edit.redo.label=Redo +viewer.menu.edit.copy.label=Copy +viewer.menu.edit.delete.label=Delete +viewer.menu.edit.selectAll.label=Select All +viewer.menu.edit.deselectAll.label=Deselect All ## View Menu and submenu items -viewer.menu.view.label = View -viewer.menu.view.mnemonic = V -viewer.menu.view.actualSize.label = Actual Size -viewer.menu.view.fitInWindow.label = Fit in Window -viewer.menu.view.fitWidth.label = Fit Width -viewer.menu.view.zoomIn.label = Zoom In -viewer.menu.view.zoomOut.label = Zoom Out -viewer.menu.view.rotateLeft.label = Rotate Left -viewer.menu.view.rotateRight.label = Rotate Right -viewer.menu.view.hideToolBar.label = Hide Toolbar -viewer.menu.view.showToolBar.label = Show Toolbar -viewer.menu.view.showUtilityPane.label = Show Utility Pane -viewer.menu.view.hideUtilityPane.label = Hide Utility Pane +viewer.menu.view.label=View +viewer.menu.view.mnemonic=V +viewer.menu.view.actualSize.label=Actual Size +viewer.menu.view.fitInWindow.label=Fit in Window +viewer.menu.view.fitWidth.label=Fit Width +viewer.menu.view.zoomIn.label=Zoom In +viewer.menu.view.zoomOut.label=Zoom Out +viewer.menu.view.rotateLeft.label=Rotate Left +viewer.menu.view.rotateRight.label=Rotate Right +viewer.menu.view.hideToolBar.label=Hide Toolbar +viewer.menu.view.showToolBar.label=Show Toolbar +viewer.menu.view.showUtilityPane.label=Show Utility Pane +viewer.menu.view.hideUtilityPane.label=Hide Utility Pane ## Document Menu and submenu items -viewer.menu.document.label = Document -viewer.menu.document.mnemonic = 
D -viewer.menu.document.firstPage.label = First Page -viewer.menu.document.previousPage.label = Previous Page -viewer.menu.document.nextPage.label = Next Page -viewer.menu.document.lastPage.label = Last Page -viewer.menu.document.search.label = Search... -viewer.menu.document.gotToPage.label = Go To Page... +viewer.menu.document.label=Document +viewer.menu.document.mnemonic=D +viewer.menu.document.firstPage.label=First Page +viewer.menu.document.previousPage.label=Previous Page +viewer.menu.document.nextPage.label=Next Page +viewer.menu.document.lastPage.label=Last Page +viewer.menu.document.search.label=Search... +viewer.menu.document.gotToPage.label=Go To Page... ## Window Menu and submenu items -viewer.menu.window.label = Window -viewer.menu.window.mnemonic = W -viewer.menu.window.minAll.label = Minimize All -viewer.menu.window.minAll.mnemonic = M -viewer.menu.window.frontAll.label = Bring All to Front -viewer.menu.window.frontAll.mnemonic = B -viewer.menu.window.1.label = 1 -viewer.menu.window.1.mnemonic = 1 -viewer.menu.window.2.label = 2 -viewer.menu.window.2.mnemonic = 2 -viewer.menu.window.3.label = 3 -viewer.menu.window.3.mnemonic = 3 -viewer.menu.window.4.label = 4 -viewer.menu.window.4.mnemonic = 4 -viewer.menu.window.5.label = 5 -viewer.menu.window.5.mnemonic = 5 -viewer.menu.window.6.label = 6 -viewer.menu.window.6.mnemonic = 6 -viewer.menu.window.7.label = 7 -viewer.menu.window.7.mnemonic = 7 -viewer.menu.window.8.label = 8 -viewer.menu.window.8.mnemonic = 8 -viewer.menu.window.9.label = 9 -viewer.menu.window.9.mnemonic = 9 +viewer.menu.window.label=Window +viewer.menu.window.mnemonic=W +viewer.menu.window.minAll.label=Minimize All +viewer.menu.window.minAll.mnemonic=M +viewer.menu.window.frontAll.label=Bring All to Front +viewer.menu.window.frontAll.mnemonic=B +viewer.menu.window.1.label=1 +viewer.menu.window.1.mnemonic=1 +viewer.menu.window.2.label=2 +viewer.menu.window.2.mnemonic=2 +viewer.menu.window.3.label=3 +viewer.menu.window.3.mnemonic=3 
+viewer.menu.window.4.label=4 +viewer.menu.window.4.mnemonic=4 +viewer.menu.window.5.label=5 +viewer.menu.window.5.mnemonic=5 +viewer.menu.window.6.label=6 +viewer.menu.window.6.mnemonic=6 +viewer.menu.window.7.label=7 +viewer.menu.window.7.mnemonic=7 +viewer.menu.window.8.label=8 +viewer.menu.window.8.mnemonic=8 +viewer.menu.window.9.label=9 +viewer.menu.window.9.mnemonic=9 ## Add as many entries as you want, to viewer.menu.window.X.label and mnemonic ## where X is an incrementing integer. The mnemonic should be one unique ## character found within the label ## Help Menu and submenu items -viewer.menu.help.label = Help -viewer.menu.help.mnemonic = H -viewer.menu.help.about.label = About ICEpdf viewer... +viewer.menu.help.label=Help +viewer.menu.help.mnemonic=H +viewer.menu.help.about.label=About ICEpdf viewer... ## General error dialog -viewer.dialog.error.exception.title = ICEsoft ICEpdf - Exception -viewer.dialog.error.exception.msg = \ - There was an error executing your command do to the following exception\n\ - {0}. +viewer.dialog.error.exception.title=ICEsoft ICEpdf - Exception +viewer.dialog.error.exception.msg=There was an error executing your command due to the following exception\n{0}. ## Open File Dialog -viewer.dialog.openFile.title = Open File -viewer.dialog.openFile.error.title = ICEsoft ICEpdf - Open File Error -viewer.dialog.openFile.error.msg = \ - ICEpdf could not open the specified file at {0}\n\ - The file may be corrupt or not a supported file type. +viewer.dialog.openFile.title=Open File +viewer.dialog.openFile.error.title=ICEsoft ICEpdf - Open File Error +viewer.dialog.openFile.error.msg=ICEpdf could not open the specified file at {0}\nThe file may be corrupt or not a supported file type. -viewer.dialog.openDocument.pdfException.title = ICEsoft ICEpdf - PDF Exception -viewer.dialog.openDocument.pdfException.msg = \ - ICEpdf could not open the specified file {0} \n\ - The file may be corrupt or not a supported file type.
+viewer.dialog.openDocument.pdfException.title=ICEsoft ICEpdf - PDF Exception +viewer.dialog.openDocument.pdfException.msg=ICEpdf could not open the specified file {0} \nThe file may be corrupt or not a supported file type. -viewer.dialog.openDocument.pdfSecurityException.title = ICEsoft ICEpdf - PDF Security Exception -viewer.dialog.openDocument.pdfSecurityException.msg = \ - ICEpdf could not open the encrypted file at {0}\n\ - This may be the result of an invalid password or a missing JCE Security Provider.\n\n\ - Please refer to ICEpdf Developer's Guide for more information. +viewer.dialog.openDocument.pdfSecurityException.title=ICEsoft ICEpdf - PDF Security Exception +viewer.dialog.openDocument.pdfSecurityException.msg=ICEpdf could not open the encrypted file at {0}\nThis may be the result of an invalid password or a missing JCE Security Provider.\n\nPlease refer to ICEpdf Developer's Guide for more information. -viewer.dialog.openDocument.exception.title = ICEsoft ICEpdf - Exception -viewer.dialog.openDocument.exception.msg = \ - ICEpdf could not open the specified file at {0}\n\ - The file may be corrupt or not a supported file type. +viewer.dialog.openDocument.exception.title=ICEsoft ICEpdf - Exception +viewer.dialog.openDocument.exception.msg=ICEpdf could not open the specified file at {0}\nThe file may be corrupt or not a supported file type. -viewer.dialog.openURL.exception.title = ICEsoft ICEpdf - URL Exception -viewer.dialog.openURL.exception.msg = \ - ICEpdf could not open the specified file. {0} \n\ - at URL: {1} -viewer.dialog.openURL.downloading.msg = Downloading {0} +viewer.dialog.openURL.exception.title=ICEsoft ICEpdf - URL Exception +viewer.dialog.openURL.exception.msg=ICEpdf could not open the specified file. 
{0} \nat URL: {1} +viewer.dialog.openURL.downloading.msg=Downloading {0} ## General error dialog -viewer.dialog.information.copyAll.title = ICEsoft ICEpdf - Information -viewer.dialog.information.copyAll.msg = \ - The document has more than {0} pages, please use\n\ - "Export text..." to extract document text. +viewer.dialog.information.copyAll.title=ICEsoft ICEpdf - Information +viewer.dialog.information.copyAll.msg=The document has more than {0} pages, please use\n"Export text..." to extract document text. ## Open URL Dialog -viewer.dialog.security.title = Document Security -viewer.dialog.security.msg = This PDF is protected -viewer.dialog.security.password.label = Password: -viewer.dialog.security.okButton.label = Ok -viewer.dialog.security.okButton.mnemonic = O -viewer.dialog.security.cancelButton.label = Cancel -viewer.dialog.security.cancelButton.mnemonic = C +viewer.dialog.security.title=Document Security +viewer.dialog.security.msg=This PDF is protected +viewer.dialog.security.password.label=Password: +viewer.dialog.security.okButton.label=Ok +viewer.dialog.security.okButton.mnemonic=O +viewer.dialog.security.cancelButton.label=Cancel +viewer.dialog.security.cancelButton.mnemonic=C ## Open URL Dialog -viewer.dialog.openURL.title = Open URL +viewer.dialog.openURL.title=Open URL ### Save a Copy Dialog -viewer.dialog.saveAs.title = Save As -viewer.dialog.saveAs.extensionError.title = ICEsoft ICEpdf - Save Error -viewer.dialog.saveAs.extensionError.msg = \ - ICEpdf could not save to {0} because it is not a supported file type. -viewer.dialog.saveAs.noExtensionError.title = ICEsoft ICEpdf - Save Error -viewer.dialog.saveAs.noExtensionError.msg = Please specify a file extension. -viewer.dialog.saveAs.noneUniqueName.title = ICEsoft ICEpdf - Save Error -viewer.dialog.saveAs.noneUniqueName.msg = \ - The file named {0} already exists. Please specify a unique name. 
-viewer.dialog.saveAs.noPermission.title = ICEpdf Viewer RI - Save Error -viewer.dialog.saveAs.noPermission.msg = You do not have permission or the credentials to save this document. -viewer.dialog.saveAs.noUpdates.title = ICEpdf Viewer RI -viewer.dialog.saveAs.noUpdates.msg = Document changes will not be saved, please upgrade to ICEpdf PRO. -viewer.dialog.saveOnClose.noUpdates.title = ICEpdf Viewer RI -viewer.dialog.saveOnClose.noUpdates.msg = Do you want to save changes to {0}? +viewer.dialog.saveAs.title=Save As +viewer.dialog.saveAs.extensionError.title=ICEsoft ICEpdf - Save Error +viewer.dialog.saveAs.extensionError.msg=ICEpdf could not save to {0} because it is not a supported file type. +viewer.dialog.saveAs.noExtensionError.title=ICEsoft ICEpdf - Save Error +viewer.dialog.saveAs.noExtensionError.msg=Please specify a file extension. +viewer.dialog.saveAs.noneUniqueName.title=ICEsoft ICEpdf - Save Error +viewer.dialog.saveAs.noneUniqueName.msg=The file named {0} already exists. Please specify a unique name. +viewer.dialog.saveAs.noPermission.title=ICEpdf Viewer RI - Save Error +viewer.dialog.saveAs.noPermission.msg=You do not have permission or the credentials to save this document. +viewer.dialog.saveAs.noUpdates.title=ICEpdf Viewer RI +viewer.dialog.saveAs.noUpdates.msg=Document changes will not be saved, please upgrade to ICEpdf PRO. +viewer.dialog.saveOnClose.noUpdates.title=ICEpdf Viewer RI +viewer.dialog.saveOnClose.noUpdates.msg=Do you want to save changes to {0}? ## Export Text Dialog -viewer.dialog.exportText.title = Export Document Text -viewer.dialog.exportText.progress.msg = Extracting PDF Text -viewer.dialog.exportText.noExtensionError.title = ICEsoft ICEpdf - Save Error -viewer.dialog.exportText.noExtensionError.msg = Please specify a file extension. 
+viewer.dialog.exportText.title=Export Document Text +viewer.dialog.exportText.progress.msg=Extracting PDF Text +viewer.dialog.exportText.noExtensionError.title=ICEsoft ICEpdf - Save Error +viewer.dialog.exportText.noExtensionError.msg=Please specify a file extension. # Text extraction output file -viewer.exportText.fileStamp.msg = ICEsoft ICEpdf Viewer, (c) ICEsoft Technologies, Inc. -viewer.exportText.pageStamp.msg = +viewer.exportText.fileStamp.msg=ICEsoft ICEpdf Viewer, (c) ICEsoft Technologies, Inc. +viewer.exportText.pageStamp.msg= # Completed x out of y page(s). -viewer.exportText.fileStamp.progress.msg = \ - Completed {0} out of {1}. -viewer.exportText.fileStamp.progress.oneFile.msg = {2} page -viewer.exportText.fileStamp.progress.moreFile.msg = {2} pages +viewer.exportText.fileStamp.progress.msg=Completed {0} out of {1}. +viewer.exportText.fileStamp.progress.oneFile.msg={2} page +viewer.exportText.fileStamp.progress.moreFile.msg={2} pages ## Export SVG Dialog -viewer.dialog.exportSVG.title = Export to SVG -viewer.dialog.exportSVG.status.exporting.msg = Exporting page {0} to SVG file {1} ... -viewer.dialog.exportSVG.status.error.msg = \ - Problem exporting page {0} to SVG file {1} : {2} -viewer.dialog.exportSVG.status.finished.msg = \ - Finished exporting page {0} to SVG file {1} -viewer.dialog.exportSVG.noExtensionError.title = ICEsoft ICEpdf - SVG Error -viewer.dialog.exportSVG.noExtensionError.msg = Please specify a file extension. -viewer.dialog.exportSVG.exportError.title = ICEsoft ICEpdf - SVG Error -viewer.dialog.exportSVG.exportError.msg = \ - ICEpdf could not export to {0} \n\ - becuase it is either not a supported file type or \n\ - because the file has been corrupted. +viewer.dialog.exportSVG.title=Export to SVG +viewer.dialog.exportSVG.status.exporting.msg=Exporting page {0} to SVG file {1} ... 
+viewer.dialog.exportSVG.status.error.msg=Problem exporting page {0} to SVG file {1} : {2} +viewer.dialog.exportSVG.status.finished.msg=Finished exporting page {0} to SVG file {1} +viewer.dialog.exportSVG.noExtensionError.title=ICEsoft ICEpdf - SVG Error +viewer.dialog.exportSVG.noExtensionError.msg=Please specify a file extension. +viewer.dialog.exportSVG.exportError.title=ICEsoft ICEpdf - SVG Error +viewer.dialog.exportSVG.exportError.msg=ICEpdf could not export to {0} \nbecause it is either not a supported file type or \nbecause the file has been corrupted. # Printing Progress bar -viewer.dialog.printing.status.progress.msg = Page {0} of {1} -viewer.dialog.printing.status.start.msg = Spooling Page(s) to Printer +viewer.dialog.printing.status.progress.msg=Page {0} of {1} +viewer.dialog.printing.status.start.msg=Spooling Page(s) to Printer ## Document Permissions Dialog -viewer.dialog.documentPermissions.title = Document Permissions -viewer.dialog.documentPermissions.securityMethod.label = Security Method: -viewer.dialog.documentPermissions.userPassword.label = User Password: -viewer.dialog.documentPermissions.ownerPassword.label = Owner Password: -viewer.dialog.documentPermissions.printing.label = Printing: -viewer.dialog.documentPermissions.changing.label = Changing the Document: -viewer.dialog.documentPermissions.copyExtraction.label = Content Copying or Extraction: -viewer.dialog.documentPermissions.comments.label = Aurthoring Comments and Form Fields: -viewer.dialog.documentPermissions.formFillingIn.label = Form Field Fill-in or Signing: -viewer.dialog.documentPermissions.accessibility.label = Content Accessibility Enabled: -viewer.dialog.documentPermissions.assembly.label = Document Assembly: -viewer.dialog.documentPermissions.encryptionLevel.label = Encryption Level: -viewer.dialog.documentPermissions.securityLevel = {0}-bit v{1} R {2} -viewer.dialog.documentPermissions.none = None -viewer.dialog.documentPermissions.no = No
-viewer.dialog.documentPermissions.yes = Yes -viewer.dialog.documentPermissions.allowed = Allowed -viewer.dialog.documentPermissions.notAllowed = Not Allowed -viewer.dialog.documentPermissions.fullyAllowed = Fully Allowed -viewer.dialog.documentPermissions.standardSecurity = Adobe Acrobat Standard Security -viewer.dialog.documentPermissions.partial = Partial (Low Quality) +viewer.dialog.documentPermissions.title=Document Permissions +viewer.dialog.documentPermissions.securityMethod.label=Security Method: +viewer.dialog.documentPermissions.userPassword.label=User Password: +viewer.dialog.documentPermissions.ownerPassword.label=Owner Password: +viewer.dialog.documentPermissions.printing.label=Printing: +viewer.dialog.documentPermissions.changing.label=Changing the Document: +viewer.dialog.documentPermissions.copyExtraction.label=Content Copying or Extraction: +viewer.dialog.documentPermissions.comments.label=Authoring Comments and Form Fields: +viewer.dialog.documentPermissions.formFillingIn.label=Form Field Fill-in or Signing: +viewer.dialog.documentPermissions.accessibility.label=Content Accessibility Enabled: +viewer.dialog.documentPermissions.assembly.label=Document Assembly: +viewer.dialog.documentPermissions.encryptionLevel.label=Encryption Level: +viewer.dialog.documentPermissions.securityLevel={0}-bit v{1} R {2} +viewer.dialog.documentPermissions.none=None +viewer.dialog.documentPermissions.no=No +viewer.dialog.documentPermissions.yes=Yes +viewer.dialog.documentPermissions.allowed=Allowed +viewer.dialog.documentPermissions.notAllowed=Not Allowed +viewer.dialog.documentPermissions.fullyAllowed=Fully Allowed +viewer.dialog.documentPermissions.standardSecurity=Adobe Acrobat Standard Security +viewer.dialog.documentPermissions.partial=Partial (Low Quality) ## Document Information Dialog -viewer.dialog.documentInformation.title = Document Information -viewer.dialog.documentInformation.title.label = Title: -viewer.dialog.documentInformation.subject.label = 
Subject: -viewer.dialog.documentInformation.author.label = Author: -viewer.dialog.documentInformation.keywords.label = Keywords: -viewer.dialog.documentInformation.creator.label = Creator: -viewer.dialog.documentInformation.producer.label = Producer: -viewer.dialog.documentInformation.created.label = Created: -viewer.dialog.documentInformation.modified.label = Modified: -viewer.dialog.documentInformation.notAvailable = Not Available +viewer.dialog.documentInformation.title=Document Information +viewer.dialog.documentInformation.title.label=Title: +viewer.dialog.documentInformation.subject.label=Subject: +viewer.dialog.documentInformation.author.label=Author: +viewer.dialog.documentInformation.keywords.label=Keywords: +viewer.dialog.documentInformation.creator.label=Creator: +viewer.dialog.documentInformation.producer.label=Producer: +viewer.dialog.documentInformation.created.label=Created: +viewer.dialog.documentInformation.modified.label=Modified: +viewer.dialog.documentInformation.notAvailable=Not Available ## Go to Page Dialog -viewer.dialog.goToPage.title = Go to Page... -viewer.dialog.goToPage.description.label = Page Number +viewer.dialog.goToPage.title=Go to Page... 
+viewer.dialog.goToPage.description.label=Page Number ## About Dialog -viewer.dialog.about.title = About ICEpdf Viewer -viewer.dialog.about.pageNumber.label = \n\ -\n\ -Check the ICEpdf web site for the latest news:\n\ -http://www.icepdf.org/ \n\n +viewer.dialog.about.title=About ICEpdf Viewer +viewer.dialog.about.pageNumber.label=\n\nCheck the ICEpdf web site for the latest news:\nhttp://www.icepdf.org/ \n\n ## Font Properties Dialog -viewer.dialog.fonts.title = Document Font Properties -viewer.dialog.fonts.border.label = Fonts used by this document -viewer.dialog.fonts.info.type.label = Type: {0} -viewer.dialog.fonts.info.encoding.label = Encoding: {0} -viewer.dialog.fonts.info.substitution.type.label = Actual Type: {0} -viewer.dialog.fonts.info.substitution.path.label = Path: {0} -viewer.dialog.fonts.searching.label = Collecting font data ({0}%). -viewer.dialog.fonts.resetCache.label = Reset Cache -viewer.dialog.fonts.resetCache.tip = Reset font properties cache file and rescan system for new fonts. +viewer.dialog.fonts.title=Document Font Properties +viewer.dialog.fonts.border.label=Fonts used by this document +viewer.dialog.fonts.info.type.label=Type: {0} +viewer.dialog.fonts.info.encoding.label=Encoding: {0} +viewer.dialog.fonts.info.substitution.type.label=Actual Type: {0} +viewer.dialog.fonts.info.substitution.path.label=Path: {0} +viewer.dialog.fonts.searching.label=Collecting font data ({0}%). +viewer.dialog.fonts.resetCache.label=Reset Cache +viewer.dialog.fonts.resetCache.tip=Reset font properties cache file and rescan system for new fonts. 
## Utility Pane Bookmarks Tab -viewer.utilityPane.bookmarks.tab.title = Bookmarks +viewer.utilityPane.bookmarks.tab.title=Bookmarks ## Utility Pane Bookmarks Tab -viewer.utilityPane.attachments.tab.title = Attachments -viewer.utilityPane.attachments.column.fileName.title = Name -viewer.utilityPane.attachments.column.description.title = Description -viewer.utilityPane.attachments.column.modified.title = Modified -viewer.utilityPane.attachments.column.size.title = Size -viewer.utilityPane.attachments.column.compressedSize.title = Compressed size -viewer.utilityPane.attachments.menu.saveAs.label = Save As... -viewer.utilityPane.attachments.saveAs.replace.title = ICEsoft ICEpdf - Save Error -viewer.utilityPane.attachments.saveAs.replace.msg = \ - The file named {0} already exists. Do you want to replace It? +viewer.utilityPane.attachments.tab.title=Attachments +viewer.utilityPane.attachments.column.fileName.title=Name +viewer.utilityPane.attachments.column.description.title=Description +viewer.utilityPane.attachments.column.modified.title=Modified +viewer.utilityPane.attachments.column.size.title=Size +viewer.utilityPane.attachments.column.compressedSize.title=Compressed size +viewer.utilityPane.attachments.menu.saveAs.label=Save As... +viewer.utilityPane.attachments.saveAs.replace.title=ICEsoft ICEpdf - Save Error +viewer.utilityPane.attachments.saveAs.replace.msg=The file named {0} already exists. Do you want to replace it?
## Utility Pane Thumbnails -viewer.utilityPane.thumbs.tab.title = Thumbnails +viewer.utilityPane.thumbs.tab.title=Thumbnails ## Layers Pane -viewer.utilityPane.layers.tab.title = Layers +viewer.utilityPane.layers.tab.title=Layers ## Signature Pane -viewer.utilityPane.signatures.tab.title = Signatures -viewer.utilityPane.signatures.tab.certTree.error.label = \ - Unsigned Signature Fields Signer certificate could not be validated {0} {1} -viewer.utilityPane.signatures.tab.certTree.rootSigned.label = Signed by {0} {1} -viewer.utilityPane.signatures.tab.certTree.rootValidating.label = Validating signature {0} {1} -viewer.utilityPane.signatures.tab.certTree.cert.invalid.label = Signature is invalid: -viewer.utilityPane.signatures.tab.certTree.cert.unknown.label = Signature is valid: -viewer.utilityPane.signatures.tab.certTree.cert.valid.label = Signature validity is unknown: -viewer.utilityPane.signatures.tab.certTree.doc.modified.label = \ - This version of the document is unaltered but subsequent changes have been made -viewer.utilityPane.signatures.tab.certTree.doc.unmodified.label = Document has not been modified since it was signed -viewer.utilityPane.signatures.tab.certTree.doc.major.label = Document has been altered or corrupted since it was signed -viewer.utilityPane.signatures.tab.certTree.signature.identity.unknown.label = \ - Signer's identity is unknown because it could not be found in your keystore -viewer.utilityPane.signatures.tab.certTree.signature.identity.unchecked.label = \ - Signature is valid, but revocation of the signer's identity could not be checked -viewer.utilityPane.signatures.tab.certTree.signature.identity.valid.label = Signer's identity is valid -viewer.utilityPane.signatures.tab.certTree.signature.time.local.label = Signing time is from the clock on this signer's computer -viewer.utilityPane.signatures.tab.certTree.signature.time.embedded.label = \ - Signature included an embedded timestamp but it could not be validated 
-viewer.utilityPane.signatures.tab.certTree.signature.details.label = Signature Details -viewer.utilityPane.signatures.tab.certTree.signature.details.reason.label = Reason: {0} -viewer.utilityPane.signatures.tab.certTree.signature.details.location.label = Location: {0} -viewer.utilityPane.signatures.tab.certTree.signature.details.full.label = Certificate Details... -viewer.utilityPane.signatures.tab.certTree.signature.lastChecked.label = Last Checked: {0} -viewer.utilityPane.signatures.tab.certTree.unsigned.label = Unsigned Signature Fields +viewer.utilityPane.signatures.tab.title=Signatures +viewer.utilityPane.signatures.tab.certTree.error.label=Unsigned Signature Fields Signer certificate could not be validated {0} {1} +viewer.utilityPane.signatures.tab.certTree.rootSigned.label=Signed by {0} {1} +viewer.utilityPane.signatures.tab.certTree.rootValidating.label=Validating signature {0} {1} +viewer.utilityPane.signatures.tab.certTree.cert.invalid.label=Signature is invalid: +viewer.utilityPane.signatures.tab.certTree.cert.unknown.label=Signature validity is unknown: +viewer.utilityPane.signatures.tab.certTree.cert.valid.label=Signature is valid: +viewer.utilityPane.signatures.tab.certTree.doc.modified.label=This version of the document is unaltered but subsequent changes have been made +viewer.utilityPane.signatures.tab.certTree.doc.unmodified.label=Document has not been modified since it was signed +viewer.utilityPane.signatures.tab.certTree.doc.major.label=Document has been altered or corrupted since it was signed +viewer.utilityPane.signatures.tab.certTree.signature.identity.unknown.label=Signer's identity is unknown because it could not be found in your keystore +viewer.utilityPane.signatures.tab.certTree.signature.identity.unchecked.label=Signature is valid, but revocation of the signer's identity could not be checked +viewer.utilityPane.signatures.tab.certTree.signature.identity.valid.label=Signer's identity is valid 
+viewer.utilityPane.signatures.tab.certTree.signature.time.local.label=Signing time is from the clock on this signer's computer +viewer.utilityPane.signatures.tab.certTree.signature.time.embedded.label=Signature included an embedded timestamp but it could not be validated +viewer.utilityPane.signatures.tab.certTree.signature.details.label=Signature Details +viewer.utilityPane.signatures.tab.certTree.signature.details.reason.label=Reason: {0} +viewer.utilityPane.signatures.tab.certTree.signature.details.location.label=Location: {0} +viewer.utilityPane.signatures.tab.certTree.signature.details.full.label=Certificate Details... +viewer.utilityPane.signatures.tab.certTree.signature.lastChecked.label=Last Checked: {0} +viewer.utilityPane.signatures.tab.certTree.unsigned.label=Unsigned Signature Fields ## Signature certificate view dialog. -viewer.utilityPane.signatures.cert.dialog.title = Certificate Details -viewer.utilityPane.signatures.cert.dialog.closeButton.label = Close -viewer.utilityPane.signatures.cert.dialog.closeButton.mnemonic = C -viewer.utilityPane.signatures.cert.dialog.info.notAvailable.label = N/A -viewer.utilityPane.signatures.cert.dialog.info.unknownSubject.label = N/A Subject -viewer.utilityPane.signatures.cert.dialog.info.unknownIssuer.label = N/A Issuer -viewer.utilityPane.signatures.cert.dialog.info.certificateInfo.label = {0} - {1} -viewer.utilityPane.signatures.cert.dialog.info.column1.label = Field -viewer.utilityPane.signatures.cert.dialog.info.column2.label = Value -viewer.utilityPane.signatures.cert.dialog.info.version.label = Version -viewer.utilityPane.signatures.cert.dialog.info.version.value = v{0} -viewer.utilityPane.signatures.cert.dialog.info.serialNumber.label = Serial Number -viewer.utilityPane.signatures.cert.dialog.info.serialNumber.value = {0} -viewer.utilityPane.signatures.cert.dialog.info.signatureAlgorithm.label = Signature Algorithm -viewer.utilityPane.signatures.cert.dialog.info.signatureAlgorithm.value = {0} 
-viewer.utilityPane.signatures.cert.dialog.info.issuer.label = Issuer -viewer.utilityPane.signatures.cert.dialog.info.issuer.value = \ - Organization: {0} \nOrganization Unit: {1} \nCommon Name: {2} \nLocal: {3} \nState: {4} \nCountry: {5} \nEmail: {6} -viewer.utilityPane.signatures.cert.dialog.info.validity.label = Validity -viewer.utilityPane.signatures.cert.dialog.info.validity.value = From: {0}\n To: {1} -viewer.utilityPane.signatures.cert.dialog.info.subject.label = Subject -viewer.utilityPane.signatures.cert.dialog.info.subject.value = \ - Organization: {0} \nOrganization Unit: {1} \nCommon Name: {2} \nLocal: {3} \nState: {4} \nCountry: {5} \nEmail: {6} -viewer.utilityPane.signatures.cert.dialog.info.signature.label = Signature -viewer.utilityPane.signatures.cert.dialog.info.md5.label = MD5 Fingerprint -viewer.utilityPane.signatures.cert.dialog.info.md5.value = {0} -viewer.utilityPane.signatures.cert.dialog.info.sha1.label = SHA1 Fingerprint -viewer.utilityPane.signatures.cert.dialog.info.sha1.value = {0} -viewer.utilityPane.signatures.verify.initializingMessage.label = Validating {0} of {1} Signatures -viewer.utilityPane.signatures.verify.completeMessage.label = Validating process complete -viewer.utilityPane.signatures.verify.validating.label = Validating signature... 
+viewer.utilityPane.signatures.cert.dialog.title=Certificate Details +viewer.utilityPane.signatures.cert.dialog.closeButton.label=Close +viewer.utilityPane.signatures.cert.dialog.closeButton.mnemonic=C +viewer.utilityPane.signatures.cert.dialog.info.notAvailable.label=N/A +viewer.utilityPane.signatures.cert.dialog.info.unknownSubject.label=N/A Subject +viewer.utilityPane.signatures.cert.dialog.info.unknownIssuer.label=N/A Issuer +viewer.utilityPane.signatures.cert.dialog.info.certificateInfo.label={0} - {1} +viewer.utilityPane.signatures.cert.dialog.info.column1.label=Field +viewer.utilityPane.signatures.cert.dialog.info.column2.label=Value +viewer.utilityPane.signatures.cert.dialog.info.version.label=Version +viewer.utilityPane.signatures.cert.dialog.info.version.value=v{0} +viewer.utilityPane.signatures.cert.dialog.info.serialNumber.label=Serial Number +viewer.utilityPane.signatures.cert.dialog.info.serialNumber.value={0} +viewer.utilityPane.signatures.cert.dialog.info.signatureAlgorithm.label=Signature Algorithm +viewer.utilityPane.signatures.cert.dialog.info.signatureAlgorithm.value={0} +viewer.utilityPane.signatures.cert.dialog.info.issuer.label=Issuer +viewer.utilityPane.signatures.cert.dialog.info.issuer.value=Organization: {0} \nOrganization Unit: {1} \nCommon Name: {2} \nLocal: {3} \nState: {4} \nCountry: {5} \nEmail: {6} +viewer.utilityPane.signatures.cert.dialog.info.validity.label=Validity +viewer.utilityPane.signatures.cert.dialog.info.validity.value=From: {0}\n To: {1} +viewer.utilityPane.signatures.cert.dialog.info.subject.label=Subject +viewer.utilityPane.signatures.cert.dialog.info.subject.value=Organization: {0} \nOrganization Unit: {1} \nCommon Name: {2} \nLocal: {3} \nState: {4} \nCountry: {5} \nEmail: {6} +viewer.utilityPane.signatures.cert.dialog.info.signature.label=Signature +viewer.utilityPane.signatures.cert.dialog.info.md5.label=MD5 Fingerprint +viewer.utilityPane.signatures.cert.dialog.info.md5.value={0} 
+viewer.utilityPane.signatures.cert.dialog.info.sha1.label=SHA1 Fingerprint +viewer.utilityPane.signatures.cert.dialog.info.sha1.value={0} +viewer.utilityPane.signatures.verify.initializingMessage.label=Validating {0} of {1} Signatures +viewer.utilityPane.signatures.verify.completeMessage.label=Validating process complete +viewer.utilityPane.signatures.verify.validating.label=Validating signature... ## Annotation Tab -viewer.utilityPane.annotation.tab.title = Annotations +viewer.utilityPane.annotation.tab.title=Annotations ## Utility Pane Annotation Link Tab -viewer.utilityPane.annotation.link.appearance.title = Link Annotation -viewer.utilityPane.annotation.link.highlightType = Highlight Style: -viewer.utilityPane.annotation.link.none = None -viewer.utilityPane.annotation.link.invert = Invert` -viewer.utilityPane.annotation.link.outline = Outline -viewer.utilityPane.annotation.link.push = Push +viewer.utilityPane.annotation.link.appearance.title=Link Annotation +viewer.utilityPane.annotation.link.highlightType=Highlight Style: +viewer.utilityPane.annotation.link.none=None +viewer.utilityPane.annotation.link.invert=Invert +viewer.utilityPane.annotation.link.outline=Outline +viewer.utilityPane.annotation.link.push=Push ## Utility Pane Annotation text markup Tab -viewer.utilityPane.annotation.textMarkup.appearance.title = Text Markup Annotation -viewer.utilityPane.annotation.textMarkup.highlightType = Type: -viewer.utilityPane.annotation.textMarkup.colorChooserTitle = MarKup Color -viewer.utilityPane.annotation.textMarkup.colorLabel = Color: -viewer.utilityPane.annotation.textMarkup.transparencyLabel = Transparency: +viewer.utilityPane.annotation.textMarkup.appearance.title=Text Markup Annotation +viewer.utilityPane.annotation.textMarkup.highlightType=Type: +viewer.utilityPane.annotation.textMarkup.colorChooserTitle=Markup Color +viewer.utilityPane.annotation.textMarkup.colorLabel=Color: +viewer.utilityPane.annotation.textMarkup.transparencyLabel=Transparency: ## 
Utility Pane Annotation line Tab -viewer.utilityPane.annotation.line.appearance.title = Line Annotation -viewer.utilityPane.annotation.line.lineThickness = Line Thickness: -viewer.utilityPane.annotation.line.lineStyle = Line Style: -viewer.utilityPane.annotation.line.startStyle = Start: -viewer.utilityPane.annotation.line.endStyle = End: -viewer.utilityPane.annotation.line.colorChooserTitle = Line Color -viewer.utilityPane.annotation.line.colorInternalChooserTitle = Line Internal Color -viewer.utilityPane.annotation.line.colorLabel = Color: -viewer.utilityPane.annotation.line.colorInternalLabel = Fill Color: -viewer.utilityPane.annotation.line.end.none = None -viewer.utilityPane.annotation.line.end.openArrow = Open Arrow -viewer.utilityPane.annotation.line.end.closedArrow = Closed Arrow -viewer.utilityPane.annotation.line.end.diamond = Diamond -viewer.utilityPane.annotation.line.end.square = Square -viewer.utilityPane.annotation.line.end.circle = Circle -viewer.utilityPane.annotation.line.transparencyLabel = Transparency: +viewer.utilityPane.annotation.line.appearance.title=Line Annotation +viewer.utilityPane.annotation.line.lineThickness=Line Thickness: +viewer.utilityPane.annotation.line.lineStyle=Line Style: +viewer.utilityPane.annotation.line.startStyle=Start: +viewer.utilityPane.annotation.line.endStyle=End: +viewer.utilityPane.annotation.line.colorChooserTitle=Line Color +viewer.utilityPane.annotation.line.colorInternalChooserTitle=Line Internal Color +viewer.utilityPane.annotation.line.colorLabel=Color: +viewer.utilityPane.annotation.line.colorInternalLabel=Fill Color: +viewer.utilityPane.annotation.line.end.none=None +viewer.utilityPane.annotation.line.end.openArrow=Open Arrow +viewer.utilityPane.annotation.line.end.closedArrow=Closed Arrow +viewer.utilityPane.annotation.line.end.diamond=Diamond +viewer.utilityPane.annotation.line.end.square=Square +viewer.utilityPane.annotation.line.end.circle=Circle 
+viewer.utilityPane.annotation.line.transparencyLabel=Transparency: ## Utility Pane Annotation square Tab -viewer.utilityPane.annotation.square.appearance.title = Square Annotation -viewer.utilityPane.annotation.square.lineThickness = Border Thickness: -viewer.utilityPane.annotation.square.lineStyle = Border Style: -viewer.utilityPane.annotation.square.colorBorderChooserTitle = Border Color -viewer.utilityPane.annotation.square.colorInteriorChooserTitle = Fill Color -viewer.utilityPane.annotation.square.borderTypeLabel = Border Type: -viewer.utilityPane.annotation.square.colorBorderLabel = Border Color: -viewer.utilityPane.annotation.square.colorInteriorLabel = Fill Color: -viewer.utilityPane.annotation.square.fillTypeLabel = Fill Type: -viewer.utilityPane.annotation.square.transparencyLabel = Transparency: +viewer.utilityPane.annotation.square.appearance.title=Square Annotation +viewer.utilityPane.annotation.square.lineThickness=Border Thickness: +viewer.utilityPane.annotation.square.lineStyle=Border Style: +viewer.utilityPane.annotation.square.colorBorderChooserTitle=Border Color +viewer.utilityPane.annotation.square.colorInteriorChooserTitle=Fill Color +viewer.utilityPane.annotation.square.borderTypeLabel=Border Type: +viewer.utilityPane.annotation.square.colorBorderLabel=Border Color: +viewer.utilityPane.annotation.square.colorInteriorLabel=Fill Color: +viewer.utilityPane.annotation.square.fillTypeLabel=Fill Type: +viewer.utilityPane.annotation.square.transparencyLabel=Transparency: ## Utility Pane Annotation free text Tab -viewer.utilityPane.annotation.freeText.appearance.title = FreeText Annotation -viewer.utilityPane.annotation.freeText.font.name = Font Name: -viewer.utilityPane.annotation.freeText.font.style = Font Style: -viewer.utilityPane.annotation.freeText.font.size = Font Size: -viewer.utilityPane.annotation.freeText.font.color = Font Color: -viewer.utilityPane.annotation.freeText.font.color.ChooserTitle = Font Color 
-viewer.utilityPane.annotation.freeText.border.thickness = Border Thickness: -viewer.utilityPane.annotation.freeText.border.type = Border Type: -viewer.utilityPane.annotation.freeText.border.style = Border Style: -viewer.utilityPane.annotation.freeText.border.color = Border Color: -viewer.utilityPane.annotation.freeText.border.color.ChooserTitle = Border Color -viewer.utilityPane.annotation.freeText.fill.type = Fill Type: -viewer.utilityPane.annotation.freeText.fill.color = Fill Color: -viewer.utilityPane.annotation.freeText.transparencyLabel = Transparency: -viewer.utilityPane.annotation.freeText.fill.color.ChooserTitle = Fill Color -viewer.utilityPane.annotation.freeText.font.dialog = Dialog -viewer.utilityPane.annotation.freeText.font.dialogInput = DialogInput -viewer.utilityPane.annotation.freeText.font.monospaced = Monospaced -viewer.utilityPane.annotation.freeText.font.serif = Serif -viewer.utilityPane.annotation.freeText.font.sanSerif = SansSerif -viewer.utilityPane.annotation.freeText.font.style.plain = Plain -viewer.utilityPane.annotation.freeText.font.style.italic = Italic -viewer.utilityPane.annotation.freeText.font.style.bold = Bold -viewer.utilityPane.annotation.freeText.font.name.helvetica = Helvetica -viewer.utilityPane.annotation.freeText.font.name.helveticaOblique = Helvetica-Oblique -viewer.utilityPane.annotation.freeText.font.name.helveticaBold = Helvetica-Bold -viewer.utilityPane.annotation.freeText.font.name.HelveticaBoldOblique = Helvetica-BoldOblique -viewer.utilityPane.annotation.freeText.font.name.timesItalic = Times-Italic -viewer.utilityPane.annotation.freeText.font.name.timesBold = Times-Bold -viewer.utilityPane.annotation.freeText.font.name.timesBoldItalic = Times-BoldItalic -viewer.utilityPane.annotation.freeText.font.name.timesRoman = Times-Roman -viewer.utilityPane.annotation.freeText.font.name.courier = Courier -viewer.utilityPane.annotation.freeText.font.name.courierOblique = Courier-Oblique 
-viewer.utilityPane.annotation.freeText.font.name.courierBoldOblique = Courier-BoldOblique -viewer.utilityPane.annotation.freeText.font.name.courierBold = Courier-Bold +viewer.utilityPane.annotation.freeText.appearance.title=FreeText Annotation +viewer.utilityPane.annotation.freeText.font.name=Font Name: +viewer.utilityPane.annotation.freeText.font.style=Font Style: +viewer.utilityPane.annotation.freeText.font.size=Font Size: +viewer.utilityPane.annotation.freeText.font.color=Font Color: +viewer.utilityPane.annotation.freeText.font.color.ChooserTitle=Font Color +viewer.utilityPane.annotation.freeText.border.thickness=Border Thickness: +viewer.utilityPane.annotation.freeText.border.type=Border Type: +viewer.utilityPane.annotation.freeText.border.style=Border Style: +viewer.utilityPane.annotation.freeText.border.color=Border Color: +viewer.utilityPane.annotation.freeText.border.color.ChooserTitle=Border Color +viewer.utilityPane.annotation.freeText.fill.type=Fill Type: +viewer.utilityPane.annotation.freeText.fill.color=Fill Color: +viewer.utilityPane.annotation.freeText.transparencyLabel=Transparency: +viewer.utilityPane.annotation.freeText.fill.color.ChooserTitle=Fill Color +viewer.utilityPane.annotation.freeText.font.dialog=Dialog +viewer.utilityPane.annotation.freeText.font.dialogInput=DialogInput +viewer.utilityPane.annotation.freeText.font.monospaced=Monospaced +viewer.utilityPane.annotation.freeText.font.serif=Serif +viewer.utilityPane.annotation.freeText.font.sanSerif=SansSerif +viewer.utilityPane.annotation.freeText.font.style.plain=Plain +viewer.utilityPane.annotation.freeText.font.style.italic=Italic +viewer.utilityPane.annotation.freeText.font.style.bold=Bold +viewer.utilityPane.annotation.freeText.font.name.helvetica=Helvetica +viewer.utilityPane.annotation.freeText.font.name.helveticaOblique=Helvetica-Oblique +viewer.utilityPane.annotation.freeText.font.name.helveticaBold=Helvetica-Bold 
+viewer.utilityPane.annotation.freeText.font.name.HelveticaBoldOblique=Helvetica-BoldOblique +viewer.utilityPane.annotation.freeText.font.name.timesItalic=Times-Italic +viewer.utilityPane.annotation.freeText.font.name.timesBold=Times-Bold +viewer.utilityPane.annotation.freeText.font.name.timesBoldItalic=Times-BoldItalic +viewer.utilityPane.annotation.freeText.font.name.timesRoman=Times-Roman +viewer.utilityPane.annotation.freeText.font.name.courier=Courier +viewer.utilityPane.annotation.freeText.font.name.courierOblique=Courier-Oblique +viewer.utilityPane.annotation.freeText.font.name.courierBoldOblique=Courier-BoldOblique +viewer.utilityPane.annotation.freeText.font.name.courierBold=Courier-Bold ## Utility Pane Annotation text Tab -viewer.utilityPane.annotation.text.appearance.title = Text Annotation -viewer.utilityPane.annotation.text.iconName = Icon: -viewer.utilityPane.annotation.text.iconName.comment = Comment -viewer.utilityPane.annotation.text.iconName.check = Check -viewer.utilityPane.annotation.text.iconName.checkMark = CheckMark -viewer.utilityPane.annotation.text.iconName.circle = Circle -viewer.utilityPane.annotation.text.iconName.cross = Cross -viewer.utilityPane.annotation.text.iconName.crossHairs = CrossHairs -viewer.utilityPane.annotation.text.iconName.help = Help -viewer.utilityPane.annotation.text.iconName.insert = Insert -viewer.utilityPane.annotation.text.iconName.key = Key -viewer.utilityPane.annotation.text.iconName.newParagraph = NewParagraph -viewer.utilityPane.annotation.text.iconName.paragraph = Paragraph -viewer.utilityPane.annotation.text.iconName.rightArrow = RightArrow -viewer.utilityPane.annotation.text.iconName.rightPointer = RightPointer -viewer.utilityPane.annotation.text.iconName.star = Star -viewer.utilityPane.annotation.text.iconName.upArrow = UpArrow -viewer.utilityPane.annotation.text.iconName.upLeftArrow = UpLeftArrow +viewer.utilityPane.annotation.text.appearance.title=Text Annotation 
+viewer.utilityPane.annotation.text.iconName=Icon: +viewer.utilityPane.annotation.text.iconName.comment=Comment +viewer.utilityPane.annotation.text.iconName.check=Check +viewer.utilityPane.annotation.text.iconName.checkMark=CheckMark +viewer.utilityPane.annotation.text.iconName.circle=Circle +viewer.utilityPane.annotation.text.iconName.cross=Cross +viewer.utilityPane.annotation.text.iconName.crossHairs=CrossHairs +viewer.utilityPane.annotation.text.iconName.help=Help +viewer.utilityPane.annotation.text.iconName.insert=Insert +viewer.utilityPane.annotation.text.iconName.key=Key +viewer.utilityPane.annotation.text.iconName.newParagraph=NewParagraph +viewer.utilityPane.annotation.text.iconName.paragraph=Paragraph +viewer.utilityPane.annotation.text.iconName.rightArrow=RightArrow +viewer.utilityPane.annotation.text.iconName.rightPointer=RightPointer +viewer.utilityPane.annotation.text.iconName.star=Star +viewer.utilityPane.annotation.text.iconName.upArrow=UpArrow +viewer.utilityPane.annotation.text.iconName.upLeftArrow=UpLeftArrow ## Utility Pane Annotation circle Tab -viewer.utilityPane.annotation.circle.appearance.title = Circle Annotation -viewer.utilityPane.annotation.circle.lineThickness = Border Thickness: -viewer.utilityPane.annotation.circle.lineStyle = Border Style: -viewer.utilityPane.annotation.circle.colorBorderChooserTitle = Border Color -viewer.utilityPane.annotation.circle.colorInteriorChooserTitle = Interior Color -viewer.utilityPane.annotation.circle.colorBorderLabel = Border Color: -viewer.utilityPane.annotation.circle.colorInteriorLabel = Fill Color: -viewer.utilityPane.annotation.circle.fillTypeLabel = Fill Type: -viewer.utilityPane.annotation.circle.transparencyLabel = Transparency: +viewer.utilityPane.annotation.circle.appearance.title=Circle Annotation +viewer.utilityPane.annotation.circle.lineThickness=Border Thickness: +viewer.utilityPane.annotation.circle.lineStyle=Border Style: 
+viewer.utilityPane.annotation.circle.colorBorderChooserTitle=Border Color +viewer.utilityPane.annotation.circle.colorInteriorChooserTitle=Interior Color +viewer.utilityPane.annotation.circle.colorBorderLabel=Border Color: +viewer.utilityPane.annotation.circle.colorInteriorLabel=Fill Color: +viewer.utilityPane.annotation.circle.fillTypeLabel=Fill Type: +viewer.utilityPane.annotation.circle.transparencyLabel=Transparency: ## Utility Pane Annotation ink Tab -viewer.utilityPane.annotation.ink.appearance.title = Ink Annotation -viewer.utilityPane.annotation.ink.lineThickness = Ink Thickness: -viewer.utilityPane.annotation.ink.lineStyle = Ink Style: -viewer.utilityPane.annotation.ink.colorBorderChooserTitle = Ink Color -viewer.utilityPane.annotation.ink.colorBorderLabel = Ink Color: -viewer.utilityPane.annotation.ink.transparencyLabel = Transparency: +viewer.utilityPane.annotation.ink.appearance.title=Ink Annotation +viewer.utilityPane.annotation.ink.lineThickness=Ink Thickness: +viewer.utilityPane.annotation.ink.lineStyle=Ink Style: +viewer.utilityPane.annotation.ink.colorBorderChooserTitle=Ink Color +viewer.utilityPane.annotation.ink.colorBorderLabel=Ink Color: +viewer.utilityPane.annotation.ink.transparencyLabel=Transparency: ## Utility Pane border Tab -viewer.utilityPane.annotation.border.title = Border -viewer.utilityPane.annotation.border.linkType = Border Type: -viewer.utilityPane.annotation.border.lineThickness = Border Thickness: -viewer.utilityPane.annotation.border.lineStyle = Border Style: -viewer.utilityPane.annotation.border.colorChooserTitle = Border Color -viewer.utilityPane.annotation.border.colorLabel = Color: -viewer.utilityPane.annotation.border.borderType.visibleRectangle = Visible -viewer.utilityPane.annotation.border.borderType.invisibleRectangle = Invisible -viewer.utilityPane.annotation.border.solid = Solid -viewer.utilityPane.annotation.border.dashed = Dashed -viewer.utilityPane.annotation.border.beveled = Beveled 
-viewer.utilityPane.annotation.border.inset = Inset -viewer.utilityPane.annotation.border.underline = Underline +viewer.utilityPane.annotation.border.title=Border +viewer.utilityPane.annotation.border.linkType=Border Type: +viewer.utilityPane.annotation.border.lineThickness=Border Thickness: +viewer.utilityPane.annotation.border.lineStyle=Border Style: +viewer.utilityPane.annotation.border.colorChooserTitle=Border Color +viewer.utilityPane.annotation.border.colorLabel=Color: +viewer.utilityPane.annotation.border.borderType.visibleRectangle=Visible +viewer.utilityPane.annotation.border.borderType.invisibleRectangle=Invisible +viewer.utilityPane.annotation.border.solid=Solid +viewer.utilityPane.annotation.border.dashed=Dashed +viewer.utilityPane.annotation.border.beveled=Beveled +viewer.utilityPane.annotation.border.inset=Inset +viewer.utilityPane.annotation.border.underline=Underline ## Utility Pane border Tab -viewer.utilityPane.annotation.flags.title = Flags -viewer.utilityPane.annotation.flags.noRotate = No Rotate: -viewer.utilityPane.annotation.flags.noZoom = No Zoom: -viewer.utilityPane.annotation.flags.readOnly = Read Only: -viewer.utilityPane.annotation.flags.printable = Printable: -viewer.utilityPane.annotation.flags.yes = Printable: -viewer.utilityPane.annotation.flags.enabled = Enabled -viewer.utilityPane.annotation.flags.disabled = Disabled +viewer.utilityPane.annotation.flags.title=Flags +viewer.utilityPane.annotation.flags.noRotate=No Rotate: +viewer.utilityPane.annotation.flags.noZoom=No Zoom: +viewer.utilityPane.annotation.flags.readOnly=Read Only: +viewer.utilityPane.annotation.flags.printable=Printable: +viewer.utilityPane.annotation.flags.yes=Printable: +viewer.utilityPane.annotation.flags.enabled=Enabled +viewer.utilityPane.annotation.flags.disabled=Disabled ## annotation action pane and dialogs. 
-viewer.utilityPane.action.selectionTitle = Action -viewer.utilityPane.action.addAction = Add -viewer.utilityPane.action.editAction = Edit -viewer.utilityPane.action.removeAction = Remove -viewer.utilityPane.action.type.destination.label = Destination -viewer.utilityPane.action.type.uriAction.label = URI Action -viewer.utilityPane.action.type.goToAction.label = GoTo Action -viewer.utilityPane.action.type.launchAction.label = Launch Action -viewer.utilityPane.action.dialog.new.title = Add New Action -viewer.utilityPane.action.dialog.new.msgs = Action Type: -viewer.utilityPane.action.dialog.delete.title = Delete Confirmation -viewer.utilityPane.action.dialog.delete.msgs = Are you sure your want to delete this action? +viewer.utilityPane.action.selectionTitle=Action +viewer.utilityPane.action.addAction=Add +viewer.utilityPane.action.editAction=Edit +viewer.utilityPane.action.removeAction=Remove +viewer.utilityPane.action.type.destination.label=Destination +viewer.utilityPane.action.type.uriAction.label=URI Action +viewer.utilityPane.action.type.goToAction.label=GoTo Action +viewer.utilityPane.action.type.launchAction.label=Launch Action +viewer.utilityPane.action.dialog.new.title=Add New Action +viewer.utilityPane.action.dialog.new.msgs=Action Type: +viewer.utilityPane.action.dialog.delete.title=Delete Confirmation +viewer.utilityPane.action.dialog.delete.msgs=Are you sure you want to delete this action? 
## uri action dialog test -viewer.utilityPane.action.dialog.uri.title = URI Action Properties -viewer.utilityPane.action.dialog.uri.msgs = URI: +viewer.utilityPane.action.dialog.uri.title=URI Action Properties +viewer.utilityPane.action.dialog.uri.msgs=URI: ## launch action dialog test -viewer.utilityPane.action.dialog.launch.title = Launch Action Properties -viewer.utilityPane.action.dialog.launch.msgs = File Path: +viewer.utilityPane.action.dialog.launch.title=Launch Action Properties +viewer.utilityPane.action.dialog.launch.msgs=File Path: ## GoTo action dialog text -viewer.utilityPane.action.dialog.goto.title = GoTo Action Properties -viewer.utilityPane.action.dialog.goto.page.label = Page: -viewer.utilityPane.action.dialog.goto.type.label = Type -viewer.utilityPane.action.dialog.goto.type.xyz.label = Absolute -viewer.utilityPane.action.dialog.goto.type.fit.label = Fit Page -viewer.utilityPane.action.dialog.goto.type.fith.label = Fit Top Width -viewer.utilityPane.action.dialog.goto.type.fitv.label = Fit Left Width -viewer.utilityPane.action.dialog.goto.type.fitr.label = Fit Zoom Box -viewer.utilityPane.action.dialog.goto.type.fitb.label = Fit Page Bounds -viewer.utilityPane.action.dialog.goto.type.fitbh.label = Fit Bounds Top -viewer.utilityPane.action.dialog.goto.type.fitbv.label = Fit Bounds Left -viewer.utilityPane.action.dialog.goto.right.label = Right: -viewer.utilityPane.action.dialog.goto.left.label = Left: -viewer.utilityPane.action.dialog.goto.top.label = Top: -viewer.utilityPane.action.dialog.goto.bottom.label = Bottom: -viewer.utilityPane.action.dialog.goto.zoom.label = Zoom: -viewer.utilityPane.action.dialog.goto.unassigned.label = NaN -viewer.utilityPane.action.dialog.goto.current.label = Current View: -viewer.utilityPane.action.dialog.goto.current = Set Location -viewer.utilityPane.action.dialog.goto.name.label = Name: -viewer.utilityPane.action.dialog.goto.browse = Browse... 
-viewer.utilityPane.action.dialog.goto.explicitDestination.title = Implicit Destination -viewer.utilityPane.action.dialog.goto.nameDestination.title = Named Destination +viewer.utilityPane.action.dialog.goto.title=GoTo Action Properties +viewer.utilityPane.action.dialog.goto.page.label=Page: +viewer.utilityPane.action.dialog.goto.type.label=Type +viewer.utilityPane.action.dialog.goto.type.xyz.label=Absolute +viewer.utilityPane.action.dialog.goto.type.fit.label=Fit Page +viewer.utilityPane.action.dialog.goto.type.fith.label=Fit Top Width +viewer.utilityPane.action.dialog.goto.type.fitv.label=Fit Left Width +viewer.utilityPane.action.dialog.goto.type.fitr.label=Fit Zoom Box +viewer.utilityPane.action.dialog.goto.type.fitb.label=Fit Page Bounds +viewer.utilityPane.action.dialog.goto.type.fitbh.label=Fit Bounds Top +viewer.utilityPane.action.dialog.goto.type.fitbv.label=Fit Bounds Left +viewer.utilityPane.action.dialog.goto.right.label=Right: +viewer.utilityPane.action.dialog.goto.left.label=Left: +viewer.utilityPane.action.dialog.goto.top.label=Top: +viewer.utilityPane.action.dialog.goto.bottom.label=Bottom: +viewer.utilityPane.action.dialog.goto.zoom.label=Zoom: +viewer.utilityPane.action.dialog.goto.unassigned.label=NaN +viewer.utilityPane.action.dialog.goto.current.label=Current View: +viewer.utilityPane.action.dialog.goto.current=Set Location +viewer.utilityPane.action.dialog.goto.name.label=Name: +viewer.utilityPane.action.dialog.goto.browse=Browse... 
+viewer.utilityPane.action.dialog.goto.explicitDestination.title=Implicit Destination +viewer.utilityPane.action.dialog.goto.nameDestination.title=Named Destination # Destination Named Tree -viewer.utilityPane.action.dialog.goto.nameTree.title = Document Name Tree -viewer.utilityPane.action.dialog.goto.nameTree.root.label = Name Tree -viewer.utilityPane.action.dialog.goto.nameTree.branch.label = {0} to {1} +viewer.utilityPane.action.dialog.goto.nameTree.title=Document Name Tree +viewer.utilityPane.action.dialog.goto.nameTree.root.label=Name Tree +viewer.utilityPane.action.dialog.goto.nameTree.branch.label={0} to {1} ## Utility Pane Search Tab -viewer.utilityPane.search.tab.title = Search -viewer.utilityPane.search.searchText.label = Search Text: -viewer.utilityPane.search.results.label = Results: -viewer.utilityPane.search.searchButton.label = Search -viewer.utilityPane.search.clearSearchButton.label = Clear -viewer.utilityPane.search.caseSenstiveCheckbox.label = Case-sensitive -viewer.utilityPane.search.wholeWordCheckbox.label = Whole words only -viewer.utilityPane.search.cumlitiveCheckbox.label = Cumulative -viewer.utilityPane.search.showPagesCheckbox.label = Show Pages -viewer.utilityPane.search.stopButton.label = Stop -viewer.utilityPane.search.searching.msg = Search... +viewer.utilityPane.search.tab.title=Search +viewer.utilityPane.search.searchText.label=Search Text: +viewer.utilityPane.search.results.label=Results: +viewer.utilityPane.search.searchButton.label=Search +viewer.utilityPane.search.clearSearchButton.label=Clear +viewer.utilityPane.search.caseSenstiveCheckbox.label=Case-sensitive +viewer.utilityPane.search.wholeWordCheckbox.label=Whole words only +viewer.utilityPane.search.cumlitiveCheckbox.label=Cumulative +viewer.utilityPane.search.showPagesCheckbox.label=Show Pages +viewer.utilityPane.search.stopButton.label=Stop +viewer.utilityPane.search.searching.msg=Search... 
# Searching x out of y page(s) -viewer.utilityPane.search.searching1.msg = \ - Searching {0} out of {1} -viewer.utilityPane.search.searching1.oneFile.msg = {2} page -viewer.utilityPane.search.searching1.moreFile.msg = {2} pages +viewer.utilityPane.search.searching1.msg=Searching {0} out of {1} +viewer.utilityPane.search.searching1.oneFile.msg={2} page +viewer.utilityPane.search.searching1.moreFile.msg={2} pages # Page x (y result(s)) -viewer.utilityPane.search.result.msg = Page {0} ({1}) -viewer.utilityPane.search.result.oneFile.msg = {2} result -viewer.utilityPane.search.result.moreFile.msg = {2} results +viewer.utilityPane.search.result.msg=Page {0} ({1}) +viewer.utilityPane.search.result.oneFile.msg={2} result +viewer.utilityPane.search.result.moreFile.msg={2} results # Searched x page(s) (y matches) -viewer.utilityPane.search.progress.msg = \ - Searched {0} {1} ({2}) -viewer.utilityPane.search.progress.onePage.msg = page -viewer.utilityPane.search.progress.morePage.msg = pages -viewer.utilityPane.search.progress.oneMatch.msg = {2} match -viewer.utilityPane.search.progress.moreMatch.msg = {2} matches +viewer.utilityPane.search.progress.msg=Searched {0} {1} ({2}) +viewer.utilityPane.search.progress.onePage.msg=page +viewer.utilityPane.search.progress.morePage.msg=pages +viewer.utilityPane.search.progress.oneMatch.msg={2} match +viewer.utilityPane.search.progress.moreMatch.msg={2} matches ## Popup Annotation component -viewer.annotation.popup.reply.label = Reply -viewer.annotation.popup.delete.label = Delete -viewer.annotation.popup.status.label = Set Status -viewer.annotation.popup.status.accepted.label = Accepted -viewer.annotation.popup.status.cancelled.label = Cancelled -viewer.annotation.popup.status.completed.label = Completed -viewer.annotation.popup.status.rejected.label = Rejected -viewer.annotation.popup.status.none.label = None -viewer.annotation.popup.openAll.label = Open all Popups -viewer.annotation.popup.minimizeAll.label = Minimize Popups 
-viewer.annotation.popup.replyTo.label = Re: {0} -viewer.annotation.popup.status.none.title = None: {0} -viewer.annotation.popup.status.none.msg = None set by {0} -viewer.annotation.popup.status.accepted.title = Accepted: {0} -viewer.annotation.popup.status.accepted.msg = Accepted set by {0} -viewer.annotation.popup.status.cancelled.title = Cancelled: {0} -viewer.annotation.popup.status.cancelled.msg = Cancelled set by {0} -viewer.annotation.popup.status.completed.title = Completed: {0} -viewer.annotation.popup.status.completed.msg = Completed set by {0} -viewer.annotation.popup.status.rejected.title = Rejected: {0} -viewer.annotation.popup.status.rejected.msg = Rejected set by {0} +viewer.annotation.popup.reply.label=Reply +viewer.annotation.popup.delete.label=Delete +viewer.annotation.popup.status.label=Set Status +viewer.annotation.popup.status.accepted.label=Accepted +viewer.annotation.popup.status.cancelled.label=Cancelled +viewer.annotation.popup.status.completed.label=Completed +viewer.annotation.popup.status.rejected.label=Rejected +viewer.annotation.popup.status.none.label=None +viewer.annotation.popup.openAll.label=Open all Popups +viewer.annotation.popup.minimizeAll.label=Minimize Popups +viewer.annotation.popup.replyTo.label=Re: {0} +viewer.annotation.popup.status.none.title=None: {0} +viewer.annotation.popup.status.none.msg=None set by {0} +viewer.annotation.popup.status.accepted.title=Accepted: {0} +viewer.annotation.popup.status.accepted.msg=Accepted set by {0} +viewer.annotation.popup.status.cancelled.title=Cancelled: {0} +viewer.annotation.popup.status.cancelled.msg=Cancelled set by {0} +viewer.annotation.popup.status.completed.title=Completed: {0} +viewer.annotation.popup.status.completed.msg=Completed set by {0} +viewer.annotation.popup.status.rejected.title=Rejected: {0} +viewer.annotation.popup.status.rejected.msg=Rejected set by {0} ## Signature component -viewer.annotation.signature.menu.validateSignature.label = Validate Signature 
-viewer.annotation.signature.menu.showCertificates.label = Show Certificate Properties -viewer.annotation.signature.menu.signatureProperties.label = Show Signature Properties -viewer.annotation.signature.menu.signaturePageNavigation.label = Go to Page... +viewer.annotation.signature.menu.validateSignature.label=Validate Signature +viewer.annotation.signature.menu.showCertificates.label=Show Certificate Properties +viewer.annotation.signature.menu.signatureProperties.label=Show Signature Properties +viewer.annotation.signature.menu.signaturePageNavigation.label=Go to Page... ## Signature validation dialog. -viewer.annotation.signature.validation.dialog.title = Signature Validation Summary -viewer.annotation.signature.validation.dialog.close.button.label = Close -viewer.annotation.signature.validation.dialog.signerProperties.button.label = Signature Properties... +viewer.annotation.signature.validation.dialog.title=Signature Validation Summary +viewer.annotation.signature.validation.dialog.close.button.label=Close +viewer.annotation.signature.validation.dialog.signerProperties.button.label=Signature Properties... 
# common validation messages -viewer.annotation.signature.validation.common.invalid.label = Signature is invalid: -viewer.annotation.signature.validation.common.unknown.label = Signature is valid: -viewer.annotation.signature.validation.common.valid.label = Signature validity is unknown: -viewer.annotation.signature.validation.common.signedBy.label = - Signed by {0} {1} -viewer.annotation.signature.validation.common.doc.modified.label = \ - - This version of the document is unaltered but subsequent changes have been made -viewer.annotation.signature.validation.common.doc.unmodified.label = - Document has not been modified since it was signed -viewer.annotation.signature.validation.common.doc.major.label = - Document has been altered or corrupted since it was signed -viewer.annotation.signature.validation.common.identity.unknown.label = \ - - Signer's identity is unknown because it could not be found in your keystore -viewer.annotation.signature.validation.common.identity.unchecked.label = \ - - Signature is valid, but revocation of the signer's identity could not be checked -viewer.annotation.signature.validation.common.identity.valid.label = - Signer's identity is valid -viewer.annotation.signature.validation.common.time.local.label = - Signing time is from the clock on this signer's computer -viewer.annotation.signature.validation.common.time.embedded.label = \ - - Signature included an embedded timestamp but it could not be validated -viewer.annotation.signature.validation.common.notAvailable.label = N/A +viewer.annotation.signature.validation.common.invalid.label=Signature is invalid: +viewer.annotation.signature.validation.common.unknown.label=Signature is valid: +viewer.annotation.signature.validation.common.valid.label=Signature validity is unknown: +viewer.annotation.signature.validation.common.signedBy.label=- Signed by {0} {1} +viewer.annotation.signature.validation.common.doc.modified.label=- This version of the document is unaltered but subsequent 
changes have been made +viewer.annotation.signature.validation.common.doc.unmodified.label=- Document has not been modified since it was signed +viewer.annotation.signature.validation.common.doc.major.label=- Document has been altered or corrupted since it was signed +viewer.annotation.signature.validation.common.identity.unknown.label=- Signer's identity is unknown because it could not be found in your keystore +viewer.annotation.signature.validation.common.identity.unchecked.label=- Signature is valid, but revocation of the signer's identity could not be checked +viewer.annotation.signature.validation.common.identity.valid.label=- Signer's identity is valid +viewer.annotation.signature.validation.common.time.local.label=- Signing time is from the clock on this signer's computer +viewer.annotation.signature.validation.common.time.embedded.label=- Signature included an embedded timestamp but it could not be validated +viewer.annotation.signature.validation.common.notAvailable.label=N/A ## Signatures properties Dialog. 
-viewer.annotation.signature.properties.dialog.title = Signature Properties -viewer.annotation.signature.properties.dialog.invalid.label = Signature is invalid -viewer.annotation.signature.properties.dialog.unknown.label = Signature is valid -viewer.annotation.signature.properties.dialog.valid.label = Signature validity is unknown -viewer.annotation.signature.properties.dialog.signedBy.label = Signed by {0} {1} -viewer.annotation.signature.properties.dialog.signingTime.label = Signed time: {0} -viewer.annotation.signature.properties.dialog.reason.label = Reason: {0} -viewer.annotation.signature.properties.dialog.location.label = Location: {0} +viewer.annotation.signature.properties.dialog.title=Signature Properties +viewer.annotation.signature.properties.dialog.invalid.label=Signature is invalid +viewer.annotation.signature.properties.dialog.unknown.label=Signature is valid +viewer.annotation.signature.properties.dialog.valid.label=Signature validity is unknown +viewer.annotation.signature.properties.dialog.signedBy.label=Signed by {0} {1} +viewer.annotation.signature.properties.dialog.signingTime.label=Signed time: {0} +viewer.annotation.signature.properties.dialog.reason.label=Reason: {0} +viewer.annotation.signature.properties.dialog.location.label=Location: {0} # SignatureSigner Info -viewer.annotation.signature.properties.dialog.pathValidation.success = - Path validation checks were successful. -viewer.annotation.signature.properties.dialog.pathValidation.failure = - Path validation checks were unsuccessful. -viewer.annotation.signature.properties.dialog.revocation.success = - Signer's certificate is valid and has not been revoked. -viewer.annotation.signature.properties.dialog.revocation.failure = - Revocation checking was not performed. -viewer.annotation.signature.properties.dialog.certificateExpired.failure = - Signer certificate has expired. -viewer.annotation.signature.properties.dialog.showCertificates.label = Signer's Certificate... 
-viewer.annotation.signature.properties.dialog.validity.title = Validity Summary -viewer.annotation.signature.properties.dialog.signerInfo.title = Signer Info +viewer.annotation.signature.properties.dialog.pathValidation.success=- Path validation checks were successful. +viewer.annotation.signature.properties.dialog.pathValidation.failure=- Path validation checks were unsuccessful. +viewer.annotation.signature.properties.dialog.revocation.success=- Signer's certificate is valid and has not been revoked. +viewer.annotation.signature.properties.dialog.revocation.failure=- Revocation checking was not performed. +viewer.annotation.signature.properties.dialog.certificateExpired.failure=- Signer certificate has expired. +viewer.annotation.signature.properties.dialog.showCertificates.label=Signer's Certificate... +viewer.annotation.signature.properties.dialog.validity.title=Validity Summary +viewer.annotation.signature.properties.dialog.signerInfo.title=Signer Info ## Common Button Labels -viewer.button.ok.label = Ok -viewer.button.ok.mnemonic = O -viewer.button.cancel.label = Cancel -viewer.button.cancel.mnemonic = C +viewer.button.ok.label=Ok +viewer.button.ok.mnemonic=O +viewer.button.cancel.label=Cancel +viewer.button.cancel.mnemonic=C ## Pilot Specific Mesages -pilot.title = ICEbrowser - ICEpdf Pilot Errror -pilot.loading.msg =Opening document {0} ... -pilot.display.msg = Displaying {0} -pilot.loading.error.msg = PDF Pilot: Failed to load {0}. -pilot.error.classLoading = Required class {0} not found. Required library \ - 'icepdf.jar' may not be on the classpath - PDF Pilot disabled."; +pilot.title=ICEbrowser - ICEpdf Pilot Errror +pilot.loading.msg=Opening document {0} ... +pilot.display.msg=Displaying {0} +pilot.loading.error.msg=PDF Pilot: Failed to load {0}. +pilot.error.classLoading=Required class {0} not found. 
Required library 'icepdf.jar' may not be on the classpath - PDF Pilot disabled."; ### # General Error Messages # Command Line Errors -viewer.commandLin.error = \ - Usage: java org.icepdf.ri.viewer.Main [-loadfile ] [-loadurl ] +viewer.commandLin.error=Usage: java org.icepdf.ri.viewer.Main [-loadfile ] [-loadurl ] # Launcher errors -viewer.launcher.URLError.dialog.title =ICEsoft ICEpdf -viewer.launcher.URLError.dialog.message = ICEpdf could not open the specified file. {0} at URL: {1}. -viewer.launcher.lookAndFeel.error.message = The specified look-and-feel ({0}) is not accessible from this platform. +viewer.launcher.URLError.dialog.title=ICEsoft ICEpdf +viewer.launcher.URLError.dialog.message=ICEpdf could not open the specified file. {0} at URL: {1}. +viewer.launcher.lookAndFeel.error.message=The specified look-and-feel ({0}) is not accessible from this platform. # Pilot Loading Errors ### parser error dialogs -parse.title = Properties Parsing Error -parse.integer = Warning : {0} is not a correct integer. -parse.float = Warning : {0} is not a correct float. -parse.double = Warning : {0} is not a correct double. -parse.choice = Warning : {0} is not a valid choice. -parse.laf = Warning : look-and-feel {0} is not supported. +parse.title=Properties Parsing Error +parse.integer=Warning : {0} is not a correct integer. +parse.float=Warning : {0} is not a correct float. +parse.double=Warning : {0} is not a correct double. +parse.choice=Warning : {0} is not a valid choice. +parse.laf=Warning : look-and-feel {0} is not supported. 
### Properties Manager Errors -manager.properties.title = ICEpdf Properties Manager -fontManager.properties.title = ICEpdf Font Manager +manager.properties.title=ICEpdf Properties Manager +fontManager.properties.title=ICEpdf Font Manager -manager.properties.createNewDirectory = \ - To create the directory {0},\n\ - where the ICEpdf Viewer will store changes to its setup, click Yes.\n\n\ - If you click "No", all changes you make to the ICEpdf Viewer setup\n\ - will be lost when you quit the application. \n\n +manager.properties.createNewDirectory=To create the directory {0},\nwhere the ICEpdf Viewer will store changes to its setup, click Yes.\n\nIf you click "No", all changes you make to the ICEpdf Viewer setup\nwill be lost when you quit the application. \n\n -manager.properties.failedCreation = \ - ICEpdf Viewer directory to store user data can not be created:\n\ - {0}\n\ - ICEpdf Viewer will not save changes to its default setup. +manager.properties.failedCreation=ICEpdf Viewer directory to store user data can not be created:\n{0}\nICEpdf Viewer will not save changes to its default setup. -manager.properties.session.nolock = \ - Error creating the lock file :\n\ - {0}\n +manager.properties.session.nolock=Error creating the lock file :\n{0}\n -manager.properties.session.readError = \ - Error loading properties file: \n\ - {0} +manager.properties.session.readError=Error loading properties file: \n{0} -manager.properties.deleted = Property file has been deleted\n\ - ({0})\n\ - Recreate it ? +manager.properties.deleted=Property file has been deleted\n({0})\nRecreate it ? -manager.properties.modified = Property file has been modified since last update\n\ -({0,date,long})\n\ -Would you like to merge changes in the file with the current properties? +manager.properties.modified=Property file has been modified since last update\n({0,date,long})\nWould you like to merge changes in the file with the current properties? 
-manager.properties.saveError = Impossible to save property file.\n\ -Encountered the folowing error :\n\ -{0} +manager.properties.saveError=Impossible to save property file.\nEncountered the folowing error :\n{0} -manager.properties.lafError =\ - Look&Feel {0} given in the default properties is unsupported.\n\ - Using system default. +manager.properties.lafError=Look&Feel {0} given in the default properties is unsupported.\nUsing system default. -manager.properties.brokenProperty = Broken default property {0} value: {1} +manager.properties.brokenProperty=Broken default property {0} value: {1} -manager.properties.missingProperty = Missing default property {0} value: {1} +manager.properties.missingProperty=Missing default property {0} value: {1} diff --git a/Core/src/org/sleuthkit/autopsy/core/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/core/Bundle.properties-MERGED index 1d50092e80..51f1208f61 100755 --- a/Core/src/org/sleuthkit/autopsy/core/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/core/Bundle.properties-MERGED @@ -3,13 +3,7 @@ Installer.closing.confirmationDialog.title=Ingest is Running # {0} - exception message Installer.closing.messageBox.caseCloseExceptionMessage=Error closing case: {0} OpenIDE-Module-Display-Category=Infrastructure -OpenIDE-Module-Long-Description=\ - This is the core Autopsy module.\n\n\ - The module contains the core components needed for the bare application to run; the RCP platform, windowing GUI, sleuthkit bindings, datamodel / storage, explorer, result viewers, content viewers, ingest framework, reporting, and core tools, such as the file search.\n\n\ - The framework included in the module contains APIs for developing modules for ingest, viewers and reporting. 
\ - The modules can be deployed as Plugins using the Autopsy plugin installer.\n\ - This module should not be uninstalled - without it, Autopsy will not run.\n\n\ - For more information, see http://www.sleuthkit.org/autopsy/ +OpenIDE-Module-Long-Description=This is the core Autopsy module.\n\nThe module contains the core components needed for the bare application to run; the RCP platform, windowing GUI, sleuthkit bindings, datamodel / storage, explorer, result viewers, content viewers, ingest framework, reporting, and core tools, such as the file search.\n\nThe framework included in the module contains APIs for developing modules for ingest, viewers and reporting. The modules can be deployed as Plugins using the Autopsy plugin installer.\nThis module should not be uninstalled - without it, Autopsy will not run.\n\nFor more information, see http://www.sleuthkit.org/autopsy/ OpenIDE-Module-Name=Autopsy-Core OpenIDE-Module-Short-Description=Autopsy Core Module org_sleuthkit_autopsy_core_update_center=http://sleuthkit.org/autopsy/updates.xml diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties-MERGED index 0636340b0b..609d68bcd1 100755 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties-MERGED @@ -75,9 +75,9 @@ DataContentViewerHex.totalPageLabel.text_1=100 DataContentViewerHex.pageLabel2.text=Page # Product Information panel -LBL_Description=
\n Product Version: {0} ({9})
Sleuth Kit Version: {7}
Netbeans RCP Build: {8}
Java: {1}; {2}
System: {3}; {4}; {5}
Userdir: {6}
+LBL_Description=
\n Product Version: {0} ({9})
Sleuth Kit Version: {7}
Netbeans RCP Build: {8}
Java: {1}; {2}
System: {3}; {4}; {5}
Userdir: {6}
Format_OperatingSystem_Value={0} version {1} running on {2} -LBL_Copyright=
Autopsy™ is a digital forensics platform based on The Sleuth Kit™ and other tools.
Copyright © 2003-2020.
+LBL_Copyright=
Autopsy™ is a digital forensics platform based on The Sleuth Kit™ and other tools.
Copyright © 2003-2020.
SortChooser.dialogTitle=Choose Sort Criteria ThumbnailViewChildren.progress.cancelling=(Cancelling) # {0} - file name @@ -105,7 +105,7 @@ DataResultViewerThumbnail.pageNextButton.text= DataResultViewerThumbnail.imagesLabel.text=Images: DataResultViewerThumbnail.imagesRangeLabel.text=- DataResultViewerThumbnail.pageNumLabel.text=- -DataResultViewerThumbnail.filePathLabel.text=\ \ \ +DataResultViewerThumbnail.filePathLabel.text=\ DataResultViewerThumbnail.goToPageLabel.text=Go to Page: DataResultViewerThumbnail.goToPageField.text= AdvancedConfigurationDialog.cancelButton.text=Cancel diff --git a/Core/src/org/sleuthkit/autopsy/coreutils/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/coreutils/Bundle.properties-MERGED index a0d535f8e6..18e279dd2c 100755 --- a/Core/src/org/sleuthkit/autopsy/coreutils/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/coreutils/Bundle.properties-MERGED @@ -30,9 +30,7 @@ PlatformUtil.getProcVmUsed.sigarNotInit.msg=Cannot get virt mem used, sigar not PlatformUtil.getProcVmUsed.gen.msg=Cannot get virt mem used, {0} PlatformUtil.getJvmMemInfo.usageText=JVM heap usage: {0}, JVM non-heap usage: {1} PlatformUtil.getPhysicalMemInfo.usageText=Physical memory usage (max, total, free): {0}, {1}, {2} -PlatformUtil.getAllMemUsageInfo.usageText={0}\n\ -{1}\n\ -Process Virtual Memory: {2} +PlatformUtil.getAllMemUsageInfo.usageText={0}\n{1}\nProcess Virtual Memory: {2} # {0} - file name ReadImageTask.mesageText=Reading image: {0} StringExtract.illegalStateException.cannotInit.msg=Unicode table not properly initialized, cannot instantiate StringExtract diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/datamodel/Bundle.properties-MERGED index d753a6e329..19a399957a 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/datamodel/Bundle.properties-MERGED @@ -313,10 +313,10 @@ 
ImageNode.getActions.viewInNewWin.text=View in New Window ImageNode.createSheet.name.name=Name ImageNode.createSheet.name.displayName=Name ImageNode.createSheet.name.desc=no description -Installer.exception.tskVerStringNull.msg=Sleuth Kit JNI test call returned without error, but version string was null\! -Installer.exception.taskVerStringBang.msg=Sleuth Kit JNI test call returned without error, but version string was ""\! -Installer.tskLibErr.msg=Problem with Sleuth Kit JNI. Test call failed\!\n\nDetails: {0} -Installer.tskLibErr.err=Fatal Error\! +Installer.exception.tskVerStringNull.msg=Sleuth Kit JNI test call returned without error, but version string was null! +Installer.exception.taskVerStringBang.msg=Sleuth Kit JNI test call returned without error, but version string was ""! +Installer.tskLibErr.msg=Problem with Sleuth Kit JNI. Test call failed!\n\nDetails: {0} +Installer.tskLibErr.err=Fatal Error! InterestingHits.interestingItems.text=INTERESTING ITEMS InterestingHits.displayName.text=Interesting Items InterestingHits.createSheet.name.name=Name diff --git a/Core/src/org/sleuthkit/autopsy/filesearch/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/filesearch/Bundle.properties-MERGED index 075a0e7afb..b6e3f1b3f9 100755 --- a/Core/src/org/sleuthkit/autopsy/filesearch/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/filesearch/Bundle.properties-MERGED @@ -19,7 +19,7 @@ KnownStatusSearchPanel.knownCheckBox.text=Known Status: KnownStatusSearchPanel.knownBadOptionCheckBox.text=Notable KnownStatusSearchPanel.knownOptionCheckBox.text=Known (NSRL or other) KnownStatusSearchPanel.unknownOptionCheckBox.text=Unknown -DateSearchFilter.noneSelectedMsg.text=At least one date type must be selected\! +DateSearchFilter.noneSelectedMsg.text=At least one date type must be selected! 
DateSearchPanel.dateCheckBox.text=Date: DateSearchPanel.jLabel4.text=Timezone: DateSearchPanel.createdCheckBox.text=Created @@ -60,7 +60,7 @@ FileSearchPanel.search.results.details=Large number of matches may impact perfor FileSearchPanel.search.exception.noFilterSelected.msg=At least one filter must be selected. FileSearchPanel.search.validationErr.msg=Validation Error: {0} FileSearchPanel.emptyWhereClause.text=Invalid options, nothing to show. -KnownStatusSearchFilter.noneSelectedMsg.text=At least one known status must be selected\! +KnownStatusSearchFilter.noneSelectedMsg.text=At least one known status must be selected! NameSearchFilter.emptyNameMsg.text=Must enter something for name search. SizeSearchPanel.sizeCompareComboBox.equalTo=equal to SizeSearchPanel.sizeCompareComboBox.greaterThan=greater than diff --git a/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties-MERGED index 11fbd0a9d8..a99e8f1b9e 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties-MERGED @@ -144,7 +144,7 @@ IngestJob.cancelReason.outOfDiskSpace.text=Out of disk space IngestJob.cancelReason.servicesDown.text=Services Down IngestJob.cancelReason.caseClosed.text=Case closed IngestJobSettingsPanel.globalSettingsButton.text=Global Settings -gest +gest= IngestJobSettingsPanel.globalSettingsButton.actionCommand=Advanced IngestJobSettingsPanel.globalSettingsButton.text=Global Settings IngestJobSettingsPanel.pastJobsButton.text=History diff --git a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/Bundle.properties-MERGED index 6f7251676d..57f7e7ff9b 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/Bundle.properties-MERGED @@ -12,12 
+12,7 @@ ExtractArchiveWithPasswordAction.progress.text=Unpacking contents of archive: {0 ExtractArchiveWithPasswordAction.prompt.text=Enter Password ExtractArchiveWithPasswordAction.prompt.title=Enter Password OpenIDE-Module-Display-Category=Ingest Module -OpenIDE-Module-Long-Description=\ - Embedded File Extraction Ingest Module\n\nThe Embedded File Extraction Ingest Module processes document files (such as doc, docx, ppt, pptx, xls, xlsx) and archive files (such as zip and others archive types supported by the 7zip extractor).\n\ - Contents of these files are extracted and the derived files are added back to the current ingest to be processed by the configured ingest modules.\n\ - If the derived file happens to be an archive file, it will be re-processed by the 7zip extractor - the extractor will process archive files N-levels deep.\n\n\ - The extracted files are navigable in the directory tree.\n\n\ - The module is supported on Windows, Linux and Mac operating systems. +OpenIDE-Module-Long-Description=Embedded File Extraction Ingest Module\n\nThe Embedded File Extraction Ingest Module processes document files (such as doc, docx, ppt, pptx, xls, xlsx) and archive files (such as zip and others archive types supported by the 7zip extractor).\nContents of these files are extracted and the derived files are added back to the current ingest to be processed by the configured ingest modules.\nIf the derived file happens to be an archive file, it will be re-processed by the 7zip extractor - the extractor will process archive files N-levels deep.\n\nThe extracted files are navigable in the directory tree.\n\nThe module is supported on Windows, Linux and Mac operating systems. 
OpenIDE-Module-Name=Embedded File Extraction OpenIDE-Module-Short-Description=Embedded File Extraction Ingest Module EmbeddedFileExtractorIngestModule.SevenZipContentReadStream.seek.exception.invalidOrigin=Invalid seek origin: {0} diff --git a/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/Bundle.properties-MERGED index cfaadf1635..5063bd55fa 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/Bundle.properties-MERGED @@ -36,27 +36,27 @@ FileExtMismatchSettingsPanel.jLabel1.text=File Types: FileExtMismatchSettingsPanel.newExtButton.text=New Extension FileExtMismatchSettingsPanel.newMimePrompt.message=Add a new MIME file type: FileExtMismatchSettingsPanel.newMimePrompt.title=New MIME -FileExtMismatchSettingsPanel.newMimePrompt.emptyMime.message=MIME type text is empty\! +FileExtMismatchSettingsPanel.newMimePrompt.emptyMime.message=MIME type text is empty! FileExtMismatchSettingsPanel.newMimePrompt.emptyMime.title=Empty type -FileExtMismatchSettingsPanel.newMimePrompt.mimeTypeNotSupported.message=MIME type not supported\! +FileExtMismatchSettingsPanel.newMimePrompt.mimeTypeNotSupported.message=MIME type not supported! FileExtMismatchSettingsPanel.newMimePrompt.mimeTypeNotSupported.title=Type not supported -FileExtMismatchSettingsPanel.newMimePrompt.mimeTypeExists.message=MIME type already exists\! +FileExtMismatchSettingsPanel.newMimePrompt.mimeTypeExists.message=MIME type already exists! FileExtMismatchSettingsPanel.newMimePrompt.mimeTypeExists.title=Type already exists FileExtMismatchSettingsPanel.newMimePrompt.mimeTypeNotDetectable.message=MIME type is not detectable by this module. FileExtMismatchSettingsPanel.newMimePrompt.mimeTypeNotDetectable.title=Type not detectable -FileExtMismatchSettingsPanel.removeTypeButton.noneSelected.message=No MIME type selected\! 
+FileExtMismatchSettingsPanel.removeTypeButton.noneSelected.message=No MIME type selected! FileExtMismatchSettingsPanel.removeTypeButton.noneSelected.title=No type selected FileExtMismatchSettingsPanel.newExtPrompt.message=Add an allowed extension: FileExtMismatchSettingsPanel.newExtPrompt.title=New allowed extension -FileExtMismatchSettingsPanel.newExtPrompt.empty.message=Extension text is empty\! +FileExtMismatchSettingsPanel.newExtPrompt.empty.message=Extension text is empty! FileExtMismatchSettingsPanel.newExtPrompt.empty.title=Extension text empty -FileExtMismatchSettingsPanel.newExtPrompt.noMimeType.message=No MIME type selected\! +FileExtMismatchSettingsPanel.newExtPrompt.noMimeType.message=No MIME type selected! FileExtMismatchSettingsPanel.newExtPrompt.noMimeType.title=No MIME type selected -FileExtMismatchSettingsPanel.newExtPrompt.extExists.message=Extension already exists\! +FileExtMismatchSettingsPanel.newExtPrompt.extExists.message=Extension already exists! FileExtMismatchSettingsPanel.newExtPrompt.extExists.title=Extension already exists -FileExtMismatchSettingsPanel.removeExtButton.noneSelected.message=No extension selected\! +FileExtMismatchSettingsPanel.removeExtButton.noneSelected.message=No extension selected! FileExtMismatchSettingsPanel.removeExtButton.noneSelected.title=No extension selected -FileExtMismatchSettingsPanel.removeExtButton.noMimeTypeSelected.message=No MIME type selected\! +FileExtMismatchSettingsPanel.removeExtButton.noMimeTypeSelected.message=No MIME type selected! 
FileExtMismatchSettingsPanel.removeExtButton.noMimeTypeSelected.title=No MIME type selected FileExtMismatchSettingsPanel.removeTypeButton.toolTipText= FileExtMismatchModuleSettingsPanel.checkAllRadioButton.text=Check all file types diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties-MERGED index dd5aa258cc..8dbb55e35f 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties-MERGED @@ -61,10 +61,7 @@ ImportCentralRepoDbProgressDialog.errorParsingFile.message=Error parsing hash se ImportCentralRepoDbProgressDialog.linesProcessed.message=\ hashes processed ImportCentralRepoDbProgressDialog.title.text=Central Repository Import Progress OpenIDE-Module-Display-Category=Ingest Module -OpenIDE-Module-Long-Description=\ - Hash Set ingest module. \n\n\ - The ingest module analyzes files in the disk image and marks them as "known" (based on NSRL hashset lookup for "known" files) and "bad / interesting" (based on one or more hash sets supplied by the user).\n\n\ - The module also contains additional non-ingest tools that are integrated in the GUI, such as file lookup by hash and hash set configuration. +OpenIDE-Module-Long-Description=Hash Set ingest module. \n\nThe ingest module analyzes files in the disk image and marks them as "known" (based on NSRL hashset lookup for "known" files) and "bad / interesting" (based on one or more hash sets supplied by the user).\n\nThe module also contains additional non-ingest tools that are integrated in the GUI, such as file lookup by hash and hash set configuration. 
OpenIDE-Module-Name=HashDatabases OptionsCategory_Name_HashDatabase=Hash Sets OptionsCategory_Keywords_HashDatabase=Hash Sets @@ -191,10 +188,7 @@ HashDbSearchThread.name.searching=Searching HashDbSearchThread.noMoreFilesWithMD5Msg=No other files with the same MD5 hash were found. ModalNoButtons.indexingDbsTitle=Indexing hash sets ModalNoButtons.indexingDbTitle=Indexing hash set -ModalNoButtons.exitHashDbIndexingMsg=You are about to exit out of indexing your hash sets. \n\ -The generated index will be left unusable. If you choose to continue,\n\ - please delete the corresponding -md5.idx file in the hash folder.\n\ - Exit indexing? +ModalNoButtons.exitHashDbIndexingMsg=You are about to exit out of indexing your hash sets. \nThe generated index will be left unusable. If you choose to continue,\nplease delete the corresponding -md5.idx file in the hash folder.\nExit indexing? ModalNoButtons.dlgTitle.unfinishedIndexing=Unfinished Indexing ModalNoButtons.indexThis.currentlyIndexing1Db=Currently indexing 1 hash set ModalNoButtons.indexThese.currentlyIndexing1OfNDbs=Currently indexing 1 of {0} diff --git a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/Bundle.properties-MERGED index 6fb258f014..cccbcc1b57 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/Bundle.properties-MERGED @@ -2,7 +2,6 @@ FilesIdentifierIngestJobSettingsPanel.getError=Error getting interesting files s FilesIdentifierIngestJobSettingsPanel.updateError=Error updating interesting files sets settings file. FilesIdentifierIngestModule.getFilesError=Error getting interesting files sets from file. FilesIdentifierIngestModule.indexError.message=Failed to index interesting file hit artifact for keyword search. 
-# {0} - daysIncluded FilesSet.rule.dateRule.toString=(modified within {0} day(s)) FilesSetDefsPanel.bytes=Bytes FilesSetDefsPanel.cancelImportMsg=Cancel import @@ -122,8 +121,8 @@ FilesSetRulePanel.nameTextField.text= FilesSetRulePanel.ruleNameLabel.text=Rule Name (Optional): FilesSetRulePanel.messages.emptyNameCondition=You must specify a name pattern for this rule. FilesSetRulePanel.messages.invalidNameRegex=The name regular expression is not valid:\n\n{0} -FilesSetRulePanel.messages.invalidCharInName=The name cannot contain \\, /, :, *, ?, \", <, or > unless it is a regular expression. -FilesSetRulePanel.messages.invalidCharInPath=The path cannot contain \\, :, *, ?, \", <, or > unless it is a regular expression. +FilesSetRulePanel.messages.invalidCharInName=The name cannot contain \\, /, :, *, ?, ", <, or > unless it is a regular expression. +FilesSetRulePanel.messages.invalidCharInPath=The path cannot contain \\, :, *, ?, ", <, or > unless it is a regular expression. FilesSetRulePanel.messages.invalidPathRegex=The path regular expression is not valid:\n\n{0} FilesSetDefsPanel.doFileSetsDialog.duplicateRuleSet.text=Rule set with name {0} already exists. FilesSetRulePanel.pathSeparatorInfoLabel.text=Folder must be in parent path. Use '/' to give consecutive names diff --git a/Core/src/org/sleuthkit/autopsy/modules/photoreccarver/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/modules/photoreccarver/Bundle.properties-MERGED index 1d07988e4c..f5dd54dc50 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/photoreccarver/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/modules/photoreccarver/Bundle.properties-MERGED @@ -24,7 +24,7 @@ PhotoRecIngestModule.complete.totalParsetime=Total Parsing Time: PhotoRecIngestModule.complete.photoRecResults=PhotoRec Results PhotoRecIngestModule.NotEnoughDiskSpace.detail.msg=PhotoRec error processing {0} with {1} Not enough space on primary disk to save unallocated space. 
PhotoRecIngestModule.cancelledByUser=PhotoRec cancelled by user. -PhotoRecIngestModule.error.exitValue=PhotoRec carver returned error exit value \= {0} when scanning {1} +PhotoRecIngestModule.error.exitValue=PhotoRec carver returned error exit value = {0} when scanning {1} PhotoRecIngestModule.error.msg=Error processing {0} with PhotoRec carver. PhotoRecIngestModule.complete.numberOfErrors=Number of Errors while Carving: PhotoRecCarverIngestJobSettingsPanel.detectionSettingsLabel.text=PhotoRec Settings diff --git a/Core/src/org/sleuthkit/autopsy/report/modules/html/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/report/modules/html/Bundle.properties-MERGED index 3db1b822ea..fce93671b3 100755 --- a/Core/src/org/sleuthkit/autopsy/report/modules/html/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/report/modules/html/Bundle.properties-MERGED @@ -5,8 +5,8 @@ ReportHTML.getName.text=HTML Report ReportHTML.getDesc.text=A report about results and tagged items in HTML format. ReportHTML.writeIndex.title=for case {0} ReportHTML.writeIndex.noFrames.msg=Your browser is not compatible with our frame setup. -ReportHTML.writeIndex.noFrames.seeNav=Please see the navigation page for artifact links, -ReportHTML.writeIndex.seeSum=and the summary page for a case summary. +ReportHTML.writeIndex.noFrames.seeNav=Please see the navigation page for artifact links, +ReportHTML.writeIndex.seeSum=and the summary page for a case summary. ReportHTML.writeNav.title=Report Navigation ReportHTML.writeNav.h1=Report Navigation ReportHTML.writeNav.summary=Case Summary @@ -16,7 +16,7 @@ ReportHTML.writeSum.caseNumber=Case Number: ReportHTML.writeSum.caseNumImages=Number of data sources in case: ReportHTML.writeSum.examiner=Examiner: ReportHTML.writeSum.title=Case Summary -ReportHTML.writeSum.warningMsg=Warning, this report was run before ingest services completed\! +ReportHTML.writeSum.warningMsg=Warning, this report was run before ingest services completed! 
# # autopsy/test/scripts/regression.py._html_report_diff() uses reportGenOn.text, caseName, caseNum, # examiner as a regex signature to skip report.html and summary.html diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/configuration/Bundle.properties-MERGED b/Experimental/src/org/sleuthkit/autopsy/experimental/configuration/Bundle.properties-MERGED index 86fd175181..854c57bed1 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/configuration/Bundle.properties-MERGED +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/configuration/Bundle.properties-MERGED @@ -65,15 +65,19 @@ DayOfTheWeekRenderer_Tuesday_Label=Tuesday DayOfTheWeekRenderer_Wednesday_Label=Wednesday GeneralOptionsPanelController.moduleErr.msg=A module caused an error listening to GeneralOptionsPanelController updates. See log to determine which module. Some data could be incomplete. GeneralOptionsPanelController.moduleErr=Module Error +# {0} - errorMessage MultiUserTestTool.criticalError=Critical error running data source processor on test data source: {0} MultiUserTestTool.errorStartingIngestJob=Ingest manager error while starting ingest job +# {0} - cancellationReason MultiUserTestTool.ingestCancelled=Ingest cancelled due to {0} MultiUserTestTool.ingestSettingsError=Failed to analyze data source due to ingest settings errors MultiUserTestTool.noContent=Test data source failed to produce content +# {0} - serviceName MultiUserTestTool.serviceDown=Multi User service is down: {0} MultiUserTestTool.startupError=Failed to analyze data source due to ingest job startup error MultiUserTestTool.unableAddFileAsDataSource=Unable to add test file as data source to case MultiUserTestTool.unableCreatFile=Unable to create a file in case output directory +# {0} - serviceName MultiUserTestTool.unableToCheckService=Unable to check Multi User service state: {0} MultiUserTestTool.unableToCreateCase=Unable to create case MultiUserTestTool.unableToInitializeDatabase=Case database was not 
successfully initialized diff --git a/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties b/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties index cd253dc3cb..083683cb3a 100644 --- a/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties +++ b/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties @@ -1,5 +1,5 @@ #Updated by build script -#Thu, 30 Sep 2021 19:36:31 -0400 +#Thu, 04 Nov 2021 16:51:33 -0400 LBL_splash_window_title=Starting Autopsy SPLASH_HEIGHT=314 SPLASH_WIDTH=538 diff --git a/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties b/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties index 2d02262803..f098a02b32 100644 --- a/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties +++ b/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties @@ -1,4 +1,4 @@ #Updated by build script -#Thu, 30 Sep 2021 19:36:31 -0400 +#Thu, 04 Nov 2021 16:51:33 -0400 CTL_MainWindow_Title=Autopsy 4.19.2 CTL_MainWindow_Title_No_Project=Autopsy 4.19.2 From 3417c864852d35ea6895f661ec0d7400dea38f1c Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Tue, 9 Nov 2021 15:33:50 -0500 Subject: [PATCH 014/142] abstract dao --- .../autopsy/mainui/datamodel/AbstractDao.java | 34 +++ .../mainui/datamodel/DataEventListener.java | 216 ++++++++++++++++++ 2 files changed, 250 insertions(+) create mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDao.java create mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataEventListener.java diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDao.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDao.java new file mode 100644 index 0000000000..171ac5516e --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDao.java @@ -0,0 +1,34 @@ +/* 
+ * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel; + +import java.beans.PropertyChangeEvent; + +/** + * Internal methods that DAOs implement. + */ +abstract class AbstractDao { + + /** + * Clear any cached data (Due to change in view + */ + abstract void clearCaches(); + + abstract void handleAutopsyEvent(PropertyChangeEvent evt); +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataEventListener.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataEventListener.java new file mode 100644 index 0000000000..4a3bab12e4 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataEventListener.java @@ -0,0 +1,216 @@ +///* +// * Autopsy Forensic Browser +// * +// * Copyright 2021 Basis Technology Corp. +// * Contact: carrier sleuthkit org +// * +// * Licensed under the Apache License, Version 2.0 (the "License"); +// * you may not use this file except in compliance with the License. +// * You may obtain a copy of the License at +// * +// * http://www.apache.org/licenses/LICENSE-2.0 +// * +// * Unless required by applicable law or agreed to in writing, software +// * distributed under the License is distributed on an "AS IS" BASIS, +// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// * See the License for the specific language governing permissions and +// * limitations under the License. +// */ +//package org.sleuthkit.autopsy.mainui.datamodel; +// +//import java.beans.PropertyChangeListener; +//import java.text.MessageFormat; +//import java.util.Collection; +//import java.util.EnumSet; +//import java.util.Set; +//import java.util.logging.Level; +//import java.util.prefs.PreferenceChangeListener; +//import org.sleuthkit.autopsy.casemodule.Case; +//import org.sleuthkit.autopsy.core.UserPreferences; +//import org.sleuthkit.autopsy.coreutils.Logger; +//import org.sleuthkit.autopsy.ingest.IngestManager; +//import org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent; +//import org.sleuthkit.autopsy.ingest.ModuleContentEvent; +//import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +//import org.sleuthkit.datamodel.Content; +// +///** +// * Listener for changes that would affect case data or cached mainui.datamodel +// * data. +// */ +//abstract class DataEventListener { +// +// /** +// * Handles a ModuleDataEvent. +// * +// * @param evt The ModuleDataEvent. +// */ +// protected void onModuleData(ModuleDataEvent evt) { +// } +// +// /** +// * Handles added or modified content. +// * +// * @param changedContent The added or modified content. +// */ +// protected void onContentChange(Content changedContent) { +// } +// +// /** +// * Handles a change in case. +// * +// * @param oldCase The old case (can be null). +// * @param newCase The new case (can be null). +// */ +// protected void onCaseChange(Case oldCase, Case newCase) { +// } +// +// /** +// * Handles a user preference change of page size. +// * +// * @param newPageSize The new page size. +// */ +// protected void onPageSizeChange(int newPageSize) { +// } +// +// /** +// * A default data event listener that handles events like case change and +// * page size change which should invalidate the entire cache. 
+// */ +// static abstract class DefaultDataEventListener extends DataEventListener { +// +// protected void onCaseChange(Case oldCase, Case newCase) { +// dropCache(); +// } +// +// protected void onPageSizeChange(int newPageSize) { +// dropCache(); +// } +// +// /** +// * Method to drop all cache entries. +// */ +// protected abstract void dropCache(); +// } +// +// /** +// * Delegates events to a list of delegate data event listeners. +// */ +// static abstract class DelegatingDataEventListener extends DataEventListener { +// +// /** +// * Returns a collection of the listeners to which this will delegate. +// * +// * @return The delegate event listeners. +// */ +// protected abstract Collection getDelegateListeners(); +// +// @Override +// protected void onModuleData(ModuleDataEvent evt) { +// getDelegateListeners().forEach((listener) -> listener.onModuleData(evt)); +// } +// +// @Override +// protected void onContentChange(Content changedContent) { +// getDelegateListeners().forEach((listener) -> listener.onContentChange(changedContent)); +// } +// +// @Override +// protected void onCaseChange(Case oldCase, Case newCase) { +// getDelegateListeners().forEach((listener) -> listener.onCaseChange(oldCase, newCase)); +// } +// +// @Override +// protected void onPageSizeChange(int newPageSize) { +// getDelegateListeners().forEach((listener) -> listener.onPageSizeChange(newPageSize)); +// } +// } +// +// /** +// * A delegating event listener which can register and unregister from +// * Autopsy event publishers (i.e. Case.addEventTypeSubscriber). +// */ +// static abstract class RegisteringDataEventListener extends DelegatingDataEventListener { +// +// private static final Logger logger = Logger.getLogger(RegisteringDataEventListener.class.getName()); +// +// /** +// * The relevant ingest module events. 
+// */ +// private static final Set INGEST_MODULE_EVENTS = EnumSet.of(IngestModuleEvent.CONTENT_CHANGED, IngestModuleEvent.DATA_ADDED); +// +// /** +// * The ingest module event listener. +// */ +// private final PropertyChangeListener ingestModuleEventListener = (evt) -> { +// String eventName = evt.getPropertyName(); +// if (IngestModuleEvent.DATA_ADDED.toString().equals(eventName) +// && (evt.getOldValue() instanceof ModuleDataEvent)) { +// +// this.onModuleData((ModuleDataEvent) evt.getOldValue()); +// +// } else if (IngestModuleEvent.CONTENT_CHANGED.toString().equals(eventName) +// && (evt.getOldValue() instanceof ModuleContentEvent) +// && ((ModuleContentEvent) evt.getOldValue()).getSource() instanceof Content) { +// +// Content changedContent = (Content) ((ModuleContentEvent) evt.getOldValue()).getSource(); +// this.onContentChange(changedContent); +// +// } else if (IngestModuleEvent.FILE_DONE.toString().equals(eventName) +// && evt.getNewValue() instanceof Content) { +// +// Content changedContent = (Content) evt.getNewValue(); +// this.onContentChange(changedContent); +// +// } else { +// logger.log(Level.WARNING, MessageFormat.format("Unknown event with eventName: {0} and event: {1}.", eventName, evt)); +// } +// }; +// +// /** +// * The relevant case events. +// */ +// private static final Set CASE_EVENTS = EnumSet.of(Case.Events.CURRENT_CASE); +// +// /** +// * The case event listener. 
+// */ +// private final PropertyChangeListener caseEventListener = (evt) -> { +// if (evt.getPropertyName().equals(Case.Events.CURRENT_CASE.toString()) +// && (evt.getOldValue() == null || evt.getOldValue() instanceof Case) +// && (evt.getNewValue() == null || evt.getNewValue() instanceof Case)) { +// this.onCaseChange((Case) evt.getOldValue(), (Case) evt.getNewValue()); +// } else { +// logger.log(Level.WARNING, MessageFormat.format("Unknown event with eventName: {0} and event: {1}.", evt.getPropertyName(), evt)); +// } +// }; +// +// /** +// * The user preference listener. +// */ +// private final PreferenceChangeListener userPreferenceListener = (evt) -> { +// if (evt.getKey().equals(UserPreferences.RESULTS_TABLE_PAGE_SIZE)) { +// int pageSize = UserPreferences.getResultsTablePageSize(); +// this.onPageSizeChange(pageSize); +// } +// }; +// +// /** +// * Registers listeners with autopsy event publishers. +// */ +// protected void register() { +// IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS, ingestModuleEventListener); +// Case.addEventTypeSubscriber(CASE_EVENTS, caseEventListener); +// UserPreferences.addChangeListener(userPreferenceListener); +// } +// +// /** +// * Unregisters listeners from autopsy event publishers. 
+// */ +// protected void unregister() { +// IngestManager.getInstance().removeIngestModuleEventListener(INGEST_MODULE_EVENTS, ingestModuleEventListener); +// Case.removeEventTypeSubscriber(CASE_EVENTS, caseEventListener); +// UserPreferences.removeChangeListener(userPreferenceListener); +// } +// } +//} From d7883f0fa2232b1c5d6e2db2aa0747025e8c9a8a Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Tue, 9 Nov 2021 18:47:59 -0500 Subject: [PATCH 015/142] updates --- .../autopsy/mainui/datamodel/AbstractDao.java | 8 +- .../datamodel/AutopsyDAOEventListener.java | 123 ++++++++++ .../datamodel/BlackboardArtifactDAO.java | 2 +- .../mainui/datamodel/DataEventListener.java | 216 ------------------ 4 files changed, 131 insertions(+), 218 deletions(-) create mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/AutopsyDAOEventListener.java delete mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataEventListener.java diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDao.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDao.java index 171ac5516e..7fc92c591e 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDao.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDao.java @@ -23,12 +23,18 @@ import java.beans.PropertyChangeEvent; /** * Internal methods that DAOs implement. */ -abstract class AbstractDao { +abstract class AbstractDAO { /** * Clear any cached data (Due to change in view */ abstract void clearCaches(); + /** + * Handles an autopsy event (i.e. ingest, case, etc.). + * @param evt The autopsy event. 
+ */ abstract void handleAutopsyEvent(PropertyChangeEvent evt); + + } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AutopsyDAOEventListener.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AutopsyDAOEventListener.java new file mode 100644 index 0000000000..fb733fdc8a --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AutopsyDAOEventListener.java @@ -0,0 +1,123 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel; + +import java.beans.PropertyChangeListener; +import java.text.MessageFormat; +import java.util.Collection; +import java.util.EnumSet; +import java.util.Set; +import java.util.logging.Level; +import java.util.prefs.PreferenceChangeListener; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.core.UserPreferences; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.ingest.IngestManager; +import org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent; +import org.sleuthkit.autopsy.ingest.ModuleContentEvent; +import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +import org.sleuthkit.datamodel.Content; + +/** + * Listener for changes that would affect case data or cached mainui.datamodel + * data. 
+ */ +abstract class AutopsyDAOEventListener { + + private static final Logger logger = Logger.getLogger(RegisteringDataEventListener.class.getName()); + + /** + * The relevant ingest module events. + */ + private static final Set INGEST_MODULE_EVENTS = EnumSet.of(IngestModuleEvent.CONTENT_CHANGED, IngestModuleEvent.DATA_ADDED); + + /** + * The ingest module event listener. + */ + private final PropertyChangeListener ingestModuleEventListener = (evt) -> { + String eventName = evt.getPropertyName(); + if (IngestModuleEvent.DATA_ADDED.toString().equals(eventName) + && (evt.getOldValue() instanceof ModuleDataEvent)) { + + this.onModuleData((ModuleDataEvent) evt.getOldValue()); + + } else if (IngestModuleEvent.CONTENT_CHANGED.toString().equals(eventName) + && (evt.getOldValue() instanceof ModuleContentEvent) + && ((ModuleContentEvent) evt.getOldValue()).getSource() instanceof Content) { + + Content changedContent = (Content) ((ModuleContentEvent) evt.getOldValue()).getSource(); + this.onContentChange(changedContent); + + } else if (IngestModuleEvent.FILE_DONE.toString().equals(eventName) + && evt.getNewValue() instanceof Content) { + + Content changedContent = (Content) evt.getNewValue(); + this.onContentChange(changedContent); + + } else { + logger.log(Level.WARNING, MessageFormat.format("Unknown event with eventName: {0} and event: {1}.", eventName, evt)); + } + }; + + /** + * The relevant case events. + */ + private static final Set CASE_EVENTS = EnumSet.of(Case.Events.CURRENT_CASE); + + /** + * The case event listener. 
+ */ + private final PropertyChangeListener caseEventListener = (evt) -> { + if (evt.getPropertyName().equals(Case.Events.CURRENT_CASE.toString()) + && (evt.getOldValue() == null || evt.getOldValue() instanceof Case) + && (evt.getNewValue() == null || evt.getNewValue() instanceof Case)) { + this.onCaseChange((Case) evt.getOldValue(), (Case) evt.getNewValue()); + } else { + logger.log(Level.WARNING, MessageFormat.format("Unknown event with eventName: {0} and event: {1}.", evt.getPropertyName(), evt)); + } + }; + + /** + * The user preference listener. + */ + private final PreferenceChangeListener userPreferenceListener = (evt) -> { + if (evt.getKey().equals(UserPreferences.RESULTS_TABLE_PAGE_SIZE)) { + int pageSize = UserPreferences.getResultsTablePageSize(); + this.onPageSizeChange(pageSize); + } + }; + + /** + * Registers listeners with autopsy event publishers. + */ + protected void register() { + IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS, ingestModuleEventListener); + Case.addEventTypeSubscriber(CASE_EVENTS, caseEventListener); + UserPreferences.addChangeListener(userPreferenceListener); + } + + /** + * Unregisters listeners from autopsy event publishers. 
+ */ + protected void unregister() { + IngestManager.getInstance().removeIngestModuleEventListener(INGEST_MODULE_EVENTS, ingestModuleEventListener); + Case.removeEventTypeSubscriber(CASE_EVENTS, caseEventListener); + UserPreferences.removeChangeListener(userPreferenceListener); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BlackboardArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BlackboardArtifactDAO.java index 12f5c5703e..c5ea115a5e 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BlackboardArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BlackboardArtifactDAO.java @@ -71,7 +71,7 @@ import org.sleuthkit.datamodel.TskCoreException; "BlackboardArtifactDAO.columnKeys.dataSource.displayName=Data Source", "BlackboardArtifactDAO.columnKeys.dataSource.description=Data Source" }) -abstract class BlackboardArtifactDAO { +abstract class BlackboardArtifactDAO extends AbstractDAO { private static Logger logger = Logger.getLogger(BlackboardArtifactDAO.class.getName()); diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataEventListener.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataEventListener.java deleted file mode 100644 index 4a3bab12e4..0000000000 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataEventListener.java +++ /dev/null @@ -1,216 +0,0 @@ -///* -// * Autopsy Forensic Browser -// * -// * Copyright 2021 Basis Technology Corp. -// * Contact: carrier sleuthkit org -// * -// * Licensed under the Apache License, Version 2.0 (the "License"); -// * you may not use this file except in compliance with the License. -// * You may obtain a copy of the License at -// * -// * http://www.apache.org/licenses/LICENSE-2.0 -// * -// * Unless required by applicable law or agreed to in writing, software -// * distributed under the License is distributed on an "AS IS" BASIS, -// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// * See the License for the specific language governing permissions and -// * limitations under the License. -// */ -//package org.sleuthkit.autopsy.mainui.datamodel; -// -//import java.beans.PropertyChangeListener; -//import java.text.MessageFormat; -//import java.util.Collection; -//import java.util.EnumSet; -//import java.util.Set; -//import java.util.logging.Level; -//import java.util.prefs.PreferenceChangeListener; -//import org.sleuthkit.autopsy.casemodule.Case; -//import org.sleuthkit.autopsy.core.UserPreferences; -//import org.sleuthkit.autopsy.coreutils.Logger; -//import org.sleuthkit.autopsy.ingest.IngestManager; -//import org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent; -//import org.sleuthkit.autopsy.ingest.ModuleContentEvent; -//import org.sleuthkit.autopsy.ingest.ModuleDataEvent; -//import org.sleuthkit.datamodel.Content; -// -///** -// * Listener for changes that would affect case data or cached mainui.datamodel -// * data. -// */ -//abstract class DataEventListener { -// -// /** -// * Handles a ModuleDataEvent. -// * -// * @param evt The ModuleDataEvent. -// */ -// protected void onModuleData(ModuleDataEvent evt) { -// } -// -// /** -// * Handles added or modified content. -// * -// * @param changedContent The added or modified content. -// */ -// protected void onContentChange(Content changedContent) { -// } -// -// /** -// * Handles a change in case. -// * -// * @param oldCase The old case (can be null). -// * @param newCase The new case (can be null). -// */ -// protected void onCaseChange(Case oldCase, Case newCase) { -// } -// -// /** -// * Handles a user preference change of page size. -// * -// * @param newPageSize The new page size. -// */ -// protected void onPageSizeChange(int newPageSize) { -// } -// -// /** -// * A default data event listener that handles events like case change and -// * page size change which should invalidate the entire cache. 
-// */ -// static abstract class DefaultDataEventListener extends DataEventListener { -// -// protected void onCaseChange(Case oldCase, Case newCase) { -// dropCache(); -// } -// -// protected void onPageSizeChange(int newPageSize) { -// dropCache(); -// } -// -// /** -// * Method to drop all cache entries. -// */ -// protected abstract void dropCache(); -// } -// -// /** -// * Delegates events to a list of delegate data event listeners. -// */ -// static abstract class DelegatingDataEventListener extends DataEventListener { -// -// /** -// * Returns a collection of the listeners to which this will delegate. -// * -// * @return The delegate event listeners. -// */ -// protected abstract Collection getDelegateListeners(); -// -// @Override -// protected void onModuleData(ModuleDataEvent evt) { -// getDelegateListeners().forEach((listener) -> listener.onModuleData(evt)); -// } -// -// @Override -// protected void onContentChange(Content changedContent) { -// getDelegateListeners().forEach((listener) -> listener.onContentChange(changedContent)); -// } -// -// @Override -// protected void onCaseChange(Case oldCase, Case newCase) { -// getDelegateListeners().forEach((listener) -> listener.onCaseChange(oldCase, newCase)); -// } -// -// @Override -// protected void onPageSizeChange(int newPageSize) { -// getDelegateListeners().forEach((listener) -> listener.onPageSizeChange(newPageSize)); -// } -// } -// -// /** -// * A delegating event listener which can register and unregister from -// * Autopsy event publishers (i.e. Case.addEventTypeSubscriber). -// */ -// static abstract class RegisteringDataEventListener extends DelegatingDataEventListener { -// -// private static final Logger logger = Logger.getLogger(RegisteringDataEventListener.class.getName()); -// -// /** -// * The relevant ingest module events. 
-// */ -// private static final Set INGEST_MODULE_EVENTS = EnumSet.of(IngestModuleEvent.CONTENT_CHANGED, IngestModuleEvent.DATA_ADDED); -// -// /** -// * The ingest module event listener. -// */ -// private final PropertyChangeListener ingestModuleEventListener = (evt) -> { -// String eventName = evt.getPropertyName(); -// if (IngestModuleEvent.DATA_ADDED.toString().equals(eventName) -// && (evt.getOldValue() instanceof ModuleDataEvent)) { -// -// this.onModuleData((ModuleDataEvent) evt.getOldValue()); -// -// } else if (IngestModuleEvent.CONTENT_CHANGED.toString().equals(eventName) -// && (evt.getOldValue() instanceof ModuleContentEvent) -// && ((ModuleContentEvent) evt.getOldValue()).getSource() instanceof Content) { -// -// Content changedContent = (Content) ((ModuleContentEvent) evt.getOldValue()).getSource(); -// this.onContentChange(changedContent); -// -// } else if (IngestModuleEvent.FILE_DONE.toString().equals(eventName) -// && evt.getNewValue() instanceof Content) { -// -// Content changedContent = (Content) evt.getNewValue(); -// this.onContentChange(changedContent); -// -// } else { -// logger.log(Level.WARNING, MessageFormat.format("Unknown event with eventName: {0} and event: {1}.", eventName, evt)); -// } -// }; -// -// /** -// * The relevant case events. -// */ -// private static final Set CASE_EVENTS = EnumSet.of(Case.Events.CURRENT_CASE); -// -// /** -// * The case event listener. 
-// */ -// private final PropertyChangeListener caseEventListener = (evt) -> { -// if (evt.getPropertyName().equals(Case.Events.CURRENT_CASE.toString()) -// && (evt.getOldValue() == null || evt.getOldValue() instanceof Case) -// && (evt.getNewValue() == null || evt.getNewValue() instanceof Case)) { -// this.onCaseChange((Case) evt.getOldValue(), (Case) evt.getNewValue()); -// } else { -// logger.log(Level.WARNING, MessageFormat.format("Unknown event with eventName: {0} and event: {1}.", evt.getPropertyName(), evt)); -// } -// }; -// -// /** -// * The user preference listener. -// */ -// private final PreferenceChangeListener userPreferenceListener = (evt) -> { -// if (evt.getKey().equals(UserPreferences.RESULTS_TABLE_PAGE_SIZE)) { -// int pageSize = UserPreferences.getResultsTablePageSize(); -// this.onPageSizeChange(pageSize); -// } -// }; -// -// /** -// * Registers listeners with autopsy event publishers. -// */ -// protected void register() { -// IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS, ingestModuleEventListener); -// Case.addEventTypeSubscriber(CASE_EVENTS, caseEventListener); -// UserPreferences.addChangeListener(userPreferenceListener); -// } -// -// /** -// * Unregisters listeners from autopsy event publishers. 
-// */ -// protected void unregister() { -// IngestManager.getInstance().removeIngestModuleEventListener(INGEST_MODULE_EVENTS, ingestModuleEventListener); -// Case.removeEventTypeSubscriber(CASE_EVENTS, caseEventListener); -// UserPreferences.removeChangeListener(userPreferenceListener); -// } -// } -//} From 0f79c5b19a024e42b03b3fef6540c37c07c0f9fd Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Tue, 9 Nov 2021 20:11:24 -0500 Subject: [PATCH 016/142] beginning concept --- .../autopsy/mainui/datamodel/AbstractDao.java | 13 +- .../mainui/datamodel/AnalysisResultDAO.java | 14 +- .../datamodel/AutopsyDAOEventListener.java | 123 ------------------ .../mainui/datamodel/DAOAggregateEvent.java | 37 ++++++ .../mainui/datamodel/DAOEventUtils.java | 76 +++++++++++ .../mainui/datamodel/DataArtifactDAO.java | 12 +- .../autopsy/mainui/datamodel/MainDAO.java | 112 +++++++++++++++- .../autopsy/mainui/datamodel/ViewsDAO.java | 6 +- .../autopsy/mainui/nodes/DAOFetcher.java | 48 ------- 9 files changed, 255 insertions(+), 186 deletions(-) delete mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/AutopsyDAOEventListener.java create mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOAggregateEvent.java create mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOEventUtils.java diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDao.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDao.java index 7fc92c591e..39d7127b9b 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDao.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDao.java @@ -19,22 +19,25 @@ package org.sleuthkit.autopsy.mainui.datamodel; import java.beans.PropertyChangeEvent; +import java.util.List; /** * Internal methods that DAOs implement. */ abstract class AbstractDAO { - + /** * Clear any cached data (Due to change in view */ abstract void clearCaches(); - + /** * Handles an autopsy event (i.e. ingest, case, etc.). 
+ * * @param evt The autopsy event. + * + * @return The list of dao events emitted due to this autopsy event. */ - abstract void handleAutopsyEvent(PropertyChangeEvent evt); - - + abstract List handleAutopsyEvent(PropertyChangeEvent evt); + } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java index aea8e47e47..51848a6935 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java @@ -682,6 +682,18 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } } + @Override + void clearCaches() { + this.analysisResultCache.invalidateAll(); + this.keywordHitCache.invalidateAll(); + this.setHitCache.invalidateAll(); + } + + @Override + List handleAutopsyEvent(PropertyChangeEvent evt) { + throw new UnsupportedOperationException("Not supported yet."); + } + /** * Handles basic functionality of fetching and paging of analysis results. */ @@ -698,7 +710,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { @Override public boolean isRefreshRequired(PropertyChangeEvent evt) { - ModuleDataEvent dataEvent = this.getModuleDataFromEvt(evt); + ModuleDataEvent dataEvent = DAOEventUtils.getModuleDataFromEvt(evt); if (dataEvent == null) { return false; } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AutopsyDAOEventListener.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AutopsyDAOEventListener.java deleted file mode 100644 index fb733fdc8a..0000000000 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AutopsyDAOEventListener.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2021 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.mainui.datamodel; - -import java.beans.PropertyChangeListener; -import java.text.MessageFormat; -import java.util.Collection; -import java.util.EnumSet; -import java.util.Set; -import java.util.logging.Level; -import java.util.prefs.PreferenceChangeListener; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.core.UserPreferences; -import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.ingest.IngestManager; -import org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent; -import org.sleuthkit.autopsy.ingest.ModuleContentEvent; -import org.sleuthkit.autopsy.ingest.ModuleDataEvent; -import org.sleuthkit.datamodel.Content; - -/** - * Listener for changes that would affect case data or cached mainui.datamodel - * data. - */ -abstract class AutopsyDAOEventListener { - - private static final Logger logger = Logger.getLogger(RegisteringDataEventListener.class.getName()); - - /** - * The relevant ingest module events. - */ - private static final Set INGEST_MODULE_EVENTS = EnumSet.of(IngestModuleEvent.CONTENT_CHANGED, IngestModuleEvent.DATA_ADDED); - - /** - * The ingest module event listener. 
- */ - private final PropertyChangeListener ingestModuleEventListener = (evt) -> { - String eventName = evt.getPropertyName(); - if (IngestModuleEvent.DATA_ADDED.toString().equals(eventName) - && (evt.getOldValue() instanceof ModuleDataEvent)) { - - this.onModuleData((ModuleDataEvent) evt.getOldValue()); - - } else if (IngestModuleEvent.CONTENT_CHANGED.toString().equals(eventName) - && (evt.getOldValue() instanceof ModuleContentEvent) - && ((ModuleContentEvent) evt.getOldValue()).getSource() instanceof Content) { - - Content changedContent = (Content) ((ModuleContentEvent) evt.getOldValue()).getSource(); - this.onContentChange(changedContent); - - } else if (IngestModuleEvent.FILE_DONE.toString().equals(eventName) - && evt.getNewValue() instanceof Content) { - - Content changedContent = (Content) evt.getNewValue(); - this.onContentChange(changedContent); - - } else { - logger.log(Level.WARNING, MessageFormat.format("Unknown event with eventName: {0} and event: {1}.", eventName, evt)); - } - }; - - /** - * The relevant case events. - */ - private static final Set CASE_EVENTS = EnumSet.of(Case.Events.CURRENT_CASE); - - /** - * The case event listener. - */ - private final PropertyChangeListener caseEventListener = (evt) -> { - if (evt.getPropertyName().equals(Case.Events.CURRENT_CASE.toString()) - && (evt.getOldValue() == null || evt.getOldValue() instanceof Case) - && (evt.getNewValue() == null || evt.getNewValue() instanceof Case)) { - this.onCaseChange((Case) evt.getOldValue(), (Case) evt.getNewValue()); - } else { - logger.log(Level.WARNING, MessageFormat.format("Unknown event with eventName: {0} and event: {1}.", evt.getPropertyName(), evt)); - } - }; - - /** - * The user preference listener. 
- */ - private final PreferenceChangeListener userPreferenceListener = (evt) -> { - if (evt.getKey().equals(UserPreferences.RESULTS_TABLE_PAGE_SIZE)) { - int pageSize = UserPreferences.getResultsTablePageSize(); - this.onPageSizeChange(pageSize); - } - }; - - /** - * Registers listeners with autopsy event publishers. - */ - protected void register() { - IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS, ingestModuleEventListener); - Case.addEventTypeSubscriber(CASE_EVENTS, caseEventListener); - UserPreferences.addChangeListener(userPreferenceListener); - } - - /** - * Unregisters listeners from autopsy event publishers. - */ - protected void unregister() { - IngestManager.getInstance().removeIngestModuleEventListener(INGEST_MODULE_EVENTS, ingestModuleEventListener); - Case.removeEventTypeSubscriber(CASE_EVENTS, caseEventListener); - UserPreferences.removeChangeListener(userPreferenceListener); - } -} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOAggregateEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOAggregateEvent.java new file mode 100644 index 0000000000..ebc446206b --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOAggregateEvent.java @@ -0,0 +1,37 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.mainui.datamodel; + +import java.util.Set; + +/** + * A single event containing an aggregate of all affected data. + */ +public class DAOAggregateEvent { + + private final Set objects; + + public DAOAggregateEvent(Set objects) { + this.objects = objects; + } + + public Set getEvents() { + return objects; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOEventUtils.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOEventUtils.java new file mode 100644 index 0000000000..c538ac0268 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOEventUtils.java @@ -0,0 +1,76 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel; + +import java.beans.PropertyChangeEvent; +import org.sleuthkit.autopsy.ingest.IngestManager; +import org.sleuthkit.autopsy.ingest.ModuleContentEvent; +import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +import org.sleuthkit.datamodel.Content; + +/** + * + * Utilities for handling events in DAO + */ +public class DAOEventUtils { + + /** + * Returns the content from the ModuleContentEvent. If the event does not + * contain a ModuleContentEvent or the event does not contain Content, null + * is returned. + * + * @param evt The event + * + * @return The inner content or null if no content. 
+ */ + static Content getContentFromEvt(PropertyChangeEvent evt) { + String eventName = evt.getPropertyName(); + if (IngestManager.IngestModuleEvent.CONTENT_CHANGED.toString().equals(eventName) + && (evt.getOldValue() instanceof ModuleContentEvent) + && ((ModuleContentEvent) evt.getOldValue()).getSource() instanceof Content) { + + return (Content) ((ModuleContentEvent) evt.getOldValue()).getSource(); + + } else if (IngestManager.IngestModuleEvent.FILE_DONE.toString().equals(eventName) + && (evt.getNewValue() instanceof Content)) { + return (Content) evt.getNewValue(); + } else { + return null; + } + } + + /** + * Returns the ModuleDataEvent in the event if there is a child + * ModuleDataEvent. If not, null is returned. + * + * @param evt The event. + * + * @return The inner ModuleDataEvent or null. + */ + static ModuleDataEvent getModuleDataFromEvt(PropertyChangeEvent evt) { + String eventName = evt.getPropertyName(); + if (IngestManager.IngestModuleEvent.DATA_ADDED.toString().equals(eventName) + && (evt.getOldValue() instanceof ModuleDataEvent)) { + + return (ModuleDataEvent) evt.getOldValue(); + } else { + return null; + } + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index e8436d7de9..70b00aa8d7 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -154,6 +154,16 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { } } + @Override + void clearCaches() { + this.dataArtifactCache.invalidateAll(); + } + + @Override + List handleAutopsyEvent(PropertyChangeEvent evt) { + ModuleDataEvent dataEvt = DAOEventUtils.getModuleDataFromEvt(evt); + } + /* * Handles fetching and paging of data artifacts. 
*/ @@ -175,7 +185,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { @Override public boolean isRefreshRequired(PropertyChangeEvent evt) { - ModuleDataEvent dataEvent = this.getModuleDataFromEvt(evt); + ModuleDataEvent dataEvent = DAOEventUtils.getModuleDataFromEvt(evt); if (dataEvent == null) { return false; } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java index 81a21ef749..e4f5fe64d4 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java @@ -18,11 +18,27 @@ */ package org.sleuthkit.autopsy.mainui.datamodel; +import com.google.common.collect.ImmutableList; +import java.beans.PropertyChangeEvent; +import java.beans.PropertyChangeListener; +import java.beans.PropertyChangeSupport; +import java.util.EnumSet; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.prefs.PreferenceChangeListener; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.apache.commons.collections.CollectionUtils; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.core.UserPreferences; +import org.sleuthkit.autopsy.ingest.IngestManager; + /** * Main entry point for DAO for providing data to populate the data results * viewer. 
*/ -public class MainDAO { +public class MainDAO extends AbstractDAO { private static MainDAO instance = null; @@ -41,10 +57,13 @@ public class MainDAO { private final TagsDAO tagsDAO = TagsDAO.getInstance(); private final OsAccountsDAO accountsDAO = OsAccountsDAO.getInstance(); + // GVDTODO when events are completely integrated, this list should contain all sub-DAO's + private final List allDAOs = ImmutableList.of(dataArtifactDAO); + public DataArtifactDAO getDataArtifactsDAO() { return dataArtifactDAO; } - + public AnalysisResultDAO getAnalysisResultDAO() { return analysisResultDAO; } @@ -52,16 +71,99 @@ public class MainDAO { public ViewsDAO getViewsDAO() { return viewsDAO; } - + public FileSystemDAO getFileSystemDAO() { return fileSystemDAO; } - + public TagsDAO getTagsDAO() { return tagsDAO; } - + public OsAccountsDAO getOsAccountsDAO() { return accountsDAO; } + + @Override + void clearCaches() { + allDAOs.forEach((subDAO) -> subDAO.clearCaches()); + } + + @Override + List handleAutopsyEvent(PropertyChangeEvent evt) { + return Stream.of(allDAOs) + .map(subDAO -> handleAutopsyEvent(evt)) + .flatMap(evts -> evts == null ? Stream.empty() : evts.stream()) + .collect(Collectors.toList()); + } + + /** + * The relevant ingest module events. + */ + private static final Set INGEST_MODULE_EVENTS = EnumSet.of(IngestManager.IngestModuleEvent.CONTENT_CHANGED, IngestManager.IngestModuleEvent.DATA_ADDED); + + /** + * The relevant case events. + */ + private static final Set CASE_EVENTS = EnumSet.of(Case.Events.CURRENT_CASE); + + /** + * The ingest module event listener. + */ + private final PropertyChangeListener ingestModuleEventListener = (evt) -> { + processAndFireDAOEvent(evt); + }; + + /** + * The case event listener. 
+ */ + private final PropertyChangeListener caseEventListener = (evt) -> { + if (evt.getPropertyName().equals(Case.Events.CURRENT_CASE.toString())) { + this.clearCaches(); + } else { + processAndFireDAOEvent(evt); + } + }; + + /** + * The user preference listener. + */ + private final PreferenceChangeListener userPreferenceListener = (evt) -> { + this.clearCaches(); + }; + + private final PropertyChangeSupport support = new PropertyChangeSupport(this); + + public void addPropertyChangeListener(PropertyChangeListener listener) { + support.addPropertyChangeListener(listener); + } + + public void removePropertyChangeListener(PropertyChangeListener listener) { + support.removePropertyChangeListener(listener); + } + + /** + * Registers listeners with autopsy event publishers. + */ + void register() { + IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS, ingestModuleEventListener); + Case.addEventTypeSubscriber(CASE_EVENTS, caseEventListener); + UserPreferences.addChangeListener(userPreferenceListener); + } + + /** + * Unregisters listeners from autopsy event publishers. 
+ */ + void unregister() { + IngestManager.getInstance().removeIngestModuleEventListener(INGEST_MODULE_EVENTS, ingestModuleEventListener); + Case.removeEventTypeSubscriber(CASE_EVENTS, caseEventListener); + UserPreferences.removeChangeListener(userPreferenceListener); + } + + void processAndFireDAOEvent(PropertyChangeEvent autopsyEvent) { + List events = this.handleAutopsyEvent(autopsyEvent); + if (CollectionUtils.isNotEmpty(events)) { + support.firePropertyChange(new PropertyChangeEvent(this, "DATA_CHANGE", null, new DAOAggregateEvent(new HashSet<>(events)))); + } + } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java index 6f88d47289..33914fb6e9 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java @@ -663,7 +663,7 @@ public class ViewsDAO { @Override public boolean isRefreshRequired(PropertyChangeEvent evt) { - Content content = this.getContentFromEvt(evt); + Content content = DAOEventUtils.getContentFromEvt(evt); if (content == null) { return false; } @@ -693,7 +693,7 @@ public class ViewsDAO { @Override public boolean isRefreshRequired(PropertyChangeEvent evt) { - Content content = this.getContentFromEvt(evt); + Content content = DAOEventUtils.getContentFromEvt(evt); if (content == null) { return false; } @@ -723,7 +723,7 @@ public class ViewsDAO { @Override public boolean isRefreshRequired(PropertyChangeEvent evt) { - Content content = this.getContentFromEvt(evt); + Content content = DAOEventUtils.getContentFromEvt(evt); if (content == null) { return false; } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DAOFetcher.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DAOFetcher.java index ff3c108032..a4f60d3cfe 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DAOFetcher.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DAOFetcher.java @@ -20,11 +20,7 @@ package 
org.sleuthkit.autopsy.mainui.nodes; import java.beans.PropertyChangeEvent; import java.util.concurrent.ExecutionException; -import org.sleuthkit.autopsy.ingest.IngestManager; -import org.sleuthkit.autopsy.ingest.ModuleContentEvent; -import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.autopsy.mainui.datamodel.SearchResultsDTO; -import org.sleuthkit.datamodel.Content; /** * Provides a generic interface to perform searches and determine if refreshes @@ -75,48 +71,4 @@ public abstract class DAOFetcher

{ * @return True if the */ public abstract boolean isRefreshRequired(PropertyChangeEvent evt); - - /** - * Returns the content from the ModuleContentEvent. If the event does not - * contain a ModuleContentEvent or the event does not contain Content, null - * is returned. - * - * @param evt The event - * - * @return The inner content or null if no content. - */ - protected Content getContentFromEvt(PropertyChangeEvent evt) { - String eventName = evt.getPropertyName(); - if (IngestManager.IngestModuleEvent.CONTENT_CHANGED.toString().equals(eventName) - && (evt.getOldValue() instanceof ModuleContentEvent) - && ((ModuleContentEvent) evt.getOldValue()).getSource() instanceof Content) { - - return (Content) ((ModuleContentEvent) evt.getOldValue()).getSource(); - - } else if (IngestManager.IngestModuleEvent.FILE_DONE.toString().equals(eventName) - && (evt.getNewValue() instanceof Content)) { - return (Content) evt.getNewValue(); - } else { - return null; - } - } - - /** - * Returns the ModuleDataEvent in the event if there is a child - * ModuleDataEvent. If not, null is returned. - * - * @param evt The event. - * - * @return The inner ModuleDataEvent or null. 
- */ - protected ModuleDataEvent getModuleDataFromEvt(PropertyChangeEvent evt) { - String eventName = evt.getPropertyName(); - if (IngestManager.IngestModuleEvent.DATA_ADDED.toString().equals(eventName) - && (evt.getOldValue() instanceof ModuleDataEvent)) { - - return (ModuleDataEvent) evt.getOldValue(); - } else { - return null; - } - } } From 071941965aea7997782ea208f17b0a028ecb224f Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Wed, 10 Nov 2021 09:46:19 -0500 Subject: [PATCH 017/142] 7895 CR data artifact ingest module --- .../Bundle.properties-MERGED | 5 +- .../datamodel/CorrelationAttributeUtil.java | 1 - .../CentralRepoDataArtifactIngestModule.java | 347 +++++------------- .../CentralRepoFileIngestModule.java | 325 ++++++---------- .../autopsy/ingest/FileIngestModule.java | 9 +- .../autopsy/ingest/IngestModule.java | 5 + 6 files changed, 217 insertions(+), 475 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/centralrepository/Bundle.properties-MERGED index b2320b5408..b4f7f835ef 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/Bundle.properties-MERGED @@ -5,7 +5,10 @@ CentralRepoCommentDialog.title.addEditCentralRepoComment=Add/Edit Central Reposi OpenIDE-Module-Name=Central Repository OpenIDE-Module-Display-Category=Ingest Module OpenIDE-Module-Short-Description=Central Repository Ingest Module -OpenIDE-Module-Long-Description=Central Repository ingest module and central database. \n\nThe Central Repository ingest module stores attributes of artifacts matching selected correlation types into a central database.\nStored attributes are used in future cases to correlate and analyzes files and artifacts during ingest. +OpenIDE-Module-Long-Description=\ + Central Repository ingest module and central database. 
\n\n\ + The Central Repository ingest module stores attributes of artifacts matching selected correlation types into a central database.\n\ + Stored attributes are used in future cases to correlate and analyzes files and artifacts during ingest. CentralRepoCommentDialog.commentLabel.text=Comment: CentralRepoCommentDialog.okButton.text=&OK CentralRepoCommentDialog.cancelButton.text=C&ancel diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeUtil.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeUtil.java index 06054a31c5..50262cd988 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeUtil.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeUtil.java @@ -40,7 +40,6 @@ import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.DataArtifact; -import org.sleuthkit.datamodel.DataSource; import org.sleuthkit.datamodel.HashUtility; import org.sleuthkit.datamodel.InvalidAccountIDException; import org.sleuthkit.datamodel.OsAccount; diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java index 4b111d95f2..ff6393b638 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java @@ -19,16 +19,11 @@ package org.sleuthkit.autopsy.centralrepository.ingestmodule; import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; import java.util.HashSet; -import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; -import java.util.Optional; 
import java.util.Set; import java.util.logging.Level; -import java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.Case; @@ -38,27 +33,21 @@ import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException; import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoPlatforms; import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance; -import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeUtil; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource; +import static org.sleuthkit.autopsy.centralrepository.ingestmodule.CentralRepoIngestModuleUtils.getOccurrencesInOtherCases; +import static org.sleuthkit.autopsy.centralrepository.ingestmodule.CentralRepoIngestModuleUtils.makePrevNotableAnalysisResult; +import static org.sleuthkit.autopsy.centralrepository.ingestmodule.CentralRepoIngestModuleUtils.makePrevSeenAnalysisResult; +import static org.sleuthkit.autopsy.centralrepository.ingestmodule.CentralRepoIngestModuleUtils.makePrevUnseenAnalysisResult; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.DataArtifactIngestModule; import org.sleuthkit.autopsy.ingest.IngestJobContext; -import org.sleuthkit.datamodel.AnalysisResult; -import org.sleuthkit.datamodel.Blackboard; -import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardAttribute; -import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_TYPE; -import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_VALUE; -import static 
org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_OTHER_CASES; -import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.DataArtifact; import org.sleuthkit.datamodel.Image; import org.sleuthkit.datamodel.OsAccount; import org.sleuthkit.datamodel.OsAccountManager; -import org.sleuthkit.datamodel.Score; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; @@ -73,19 +62,13 @@ import org.sleuthkit.datamodel.TskData; public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestModule { private static final Logger LOGGER = Logger.getLogger(CentralRepoDataArtifactIngestModule.class.getName()); - private static final int MAX_PREV_CASES_FOR_NOTABLE_SCORE = 10; - private static final int MAX_PREV_CASES_FOR_PREV_SEEN = 20; - private final Set corrAttrsAlreadyCreated; - private final boolean saveCorrAttrs; private final boolean flagNotableItems; private final boolean flagPrevSeenDevices; private final boolean flagUniqueArtifacts; - private Case currentCase; - private Blackboard blackboard; - private OsAccountManager osAccountMgr; + private final boolean saveCorrAttrInstances; + private final Set corrAttrsAnalyzed; private CentralRepository centralRepo; - private Content dataSource; - private long ingestJobId; + private IngestJobContext context; /** * Constructs a data artifact ingest module that adds correlation attributes @@ -97,51 +80,45 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo * @param settings The ingest job settings for this module. 
*/ CentralRepoDataArtifactIngestModule(IngestSettings settings) { - corrAttrsAlreadyCreated = new LinkedHashSet<>(); - saveCorrAttrs = settings.shouldCreateCorrelationProperties(); flagNotableItems = settings.isFlagTaggedNotableItems(); flagPrevSeenDevices = settings.isFlagPreviousDevices(); flagUniqueArtifacts = settings.isFlagUniqueArtifacts(); + saveCorrAttrInstances = settings.shouldCreateCorrelationProperties(); + corrAttrsAnalyzed = new LinkedHashSet<>(); } @NbBundle.Messages({ "CentralRepoIngestModule_crNotEnabledErrMsg=Central repository required, but not enabled", - "CentralRepoIngestModule_noCurrentCaseErrMsg=Error getting current case", - "CentralRepoIngestModule_osAcctMgrInaccessibleErrMsg=Error getting OS accounts manager", "CentralRepoIngestModule_crInaccessibleErrMsg=Error accessing central repository", + "CentralRepoIngestModule_noCurrentCaseErrMsg=Error getting current case", "CentralRepoIngestModule_crDatabaseTypeMismatch=Mulit-user cases require a PostgreSQL central repository" }) @Override public void startUp(IngestJobContext context) throws IngestModuleException { - /* - * IMPORTANT: Start up IngestModuleException messages are displayed to - * the user, if a user is present. Therefore, an exception to the policy - * that exception messages are not localized is appropriate here. Also, - * the exception messages should be user-friendly. - */ - dataSource = context.getDataSource(); - ingestJobId = context.getJobId(); + this.context = context; + if (!CentralRepository.isEnabled()) { throw new IngestModuleException(Bundle.CentralRepoIngestModule_crNotEnabledErrMsg()); // May be displayed to user. 
} + try { - currentCase = Case.getCurrentCaseThrows(); - SleuthkitCase tskCase = currentCase.getSleuthkitCase(); - blackboard = tskCase.getBlackboard(); - osAccountMgr = tskCase.getOsAccountManager(); centralRepo = CentralRepository.getInstance(); - } catch (NoCurrentCaseException ex) { - throw new IngestModuleException(Bundle.CentralRepoIngestModule_noCurrentCaseErrMsg(), ex); - } catch (TskCoreException ex) { - throw new IngestModuleException(Bundle.CentralRepoIngestModule_osAcctMgrInaccessibleErrMsg(), ex); } catch (CentralRepoException ex) { throw new IngestModuleException(Bundle.CentralRepoIngestModule_crInaccessibleErrMsg(), ex); } - // Don't allow sqlite central repo databases to be used for multi user cases - if ((currentCase.getCaseType() == Case.CaseType.MULTI_USER_CASE) && (CentralRepoDbManager.getSavedDbChoice().getDbPlatform() == CentralRepoPlatforms.SQLITE)) { - throw new IngestModuleException(Bundle.CentralRepoIngestModule_crDatabaseTypeMismatch()); - } + /* + * Don't allow a SQLite central repository to be used for a multi-user + * case. + */ + try { + Case currentCase = Case.getCurrentCaseThrows(); + if ((currentCase.getCaseType() == Case.CaseType.MULTI_USER_CASE) && (CentralRepoDbManager.getSavedDbChoice().getDbPlatform() == CentralRepoPlatforms.SQLITE)) { + throw new IngestModuleException(Bundle.CentralRepoIngestModule_crDatabaseTypeMismatch()); + } + } catch (NoCurrentCaseException ex) { + throw new IngestModuleException(Bundle.CentralRepoIngestModule_noCurrentCaseErrMsg(), ex); + } } /** @@ -156,95 +133,89 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo */ @Override public ProcessResult process(DataArtifact artifact) { - List corrAttrs = CorrelationAttributeUtil.makeCorrAttrsToSave(artifact); - for (CorrelationAttributeInstance corrAttr : corrAttrs) { - if (!corrAttrsAlreadyCreated.add(corrAttr.toString())) { - /* - * This is a bit of a time saver. 
Uniqueness constraints in the - * central repository prevent creation of duplicate correlation - * attributes, so this saves no-op central repository insert - * attempts. - */ - continue; - } - - makeAnalysisResults(artifact, corrAttr); - - if (saveCorrAttrs) { - try { - centralRepo.addAttributeInstanceBulk(corrAttr); - } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, String.format("Error adding correlation attribute '%s' to central repository for data artifact '%s' (job ID=%d)", corrAttr, artifact, ingestJobId), ex); //NON-NLS - } - } - } + analyze(artifact); return ProcessResult.OK; } @Override public void shutDown() { - if (saveCorrAttrs || flagPrevSeenDevices) { - analyzeOsAccounts(); - } - if (saveCorrAttrs) { + analyzeOsAccounts(); + if (saveCorrAttrInstances) { try { centralRepo.commitAttributeInstancesBulk(); } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, String.format("Error doing final bulk commit of correlation attributes (job ID=%d)", ingestJobId), ex); // NON-NLS + LOGGER.log(Level.SEVERE, String.format("Error doing final bulk commit of correlation attributes (job ID=%d)", context.getJobId()), ex); // NON-NLS } } syncDataSourceHashes(); } /** - * Adds correlation attributes to the central repository for the OS accounts - * in the data source and creates previously seen analysis results for the - * accounts if they have been seen in other cases. + * Translates the attributes of an OS account into central repository + * correlation attributes and uses them to create analysis results and new + * central repository correlation attribute instances, depending on ingest + * job settings. 
*/ @NbBundle.Messages({ "CentralRepoIngestModule_prevSeenOsAcctSetName=Users seen in previous cases", "CentralRepoIngestModule_prevSeenOsAcctConfig=Previously Seen Users (Central Repository)" }) private void analyzeOsAccounts() { - try { - List osAccounts = osAccountMgr.getOsAccountsByDataSourceObjId(dataSource.getId()); - for (OsAccount osAccount : osAccounts) { - process(osAccount, dataSource); + if (saveCorrAttrInstances || flagPrevSeenDevices) { + try { + Case currentCase = Case.getCurrentCaseThrows(); + SleuthkitCase tskCase = currentCase.getSleuthkitCase(); + OsAccountManager osAccountMgr = tskCase.getOsAccountManager(); + List osAccounts = osAccountMgr.getOsAccountsByDataSourceObjId(context.getDataSource().getId()); + for (OsAccount osAccount : osAccounts) { + analyze(osAccount); + } + } catch (NoCurrentCaseException | TskCoreException ex) { + LOGGER.log(Level.SEVERE, String.format("Error getting OS accounts for data source %s (job ID=%d)", context.getDataSource(), context.getJobId()), ex); } - } catch (TskCoreException ex) { - LOGGER.log(Level.SEVERE, String.format("Error getting OS accounts for data source %s (job ID=%d)", dataSource, ingestJobId), ex); } } /** - * Translates the attributes of a OS account and its data source (an OS - * account instance) into central repository correlation attributes and uses - * them to create analysis results and new central repository correlation - * attribute instances, depending on ingest job settings. + * Translates the attributes of a data artifact or an OS account into + * central repository correlation attributes and uses them to create + * analysis results and new central repository correlation attribute + * instances, depending on ingest job settings. * - * @param osAccount The OS account. - * @param dataSource The data source. + * @param content The artifact or account. 
*/ - private void process(OsAccount osAccount, Content dataSource) { - List corrAttrs = CorrelationAttributeUtil.makeCorrAttrsToSave(osAccount, dataSource); + private void analyze(Content content) { + if (content == null || (!flagNotableItems && !flagPrevSeenDevices && !flagUniqueArtifacts && !saveCorrAttrInstances)) { + return; + } + + DataArtifact artifact = null; + OsAccount osAccount = null; + List corrAttrs = new ArrayList<>(); + if (content instanceof DataArtifact) { + artifact = (DataArtifact) content; + corrAttrs.addAll(CorrelationAttributeUtil.makeCorrAttrsToSave(artifact)); + } else { + osAccount = (OsAccount) content; + corrAttrs.addAll(CorrelationAttributeUtil.makeCorrAttrsToSave(osAccount, context.getDataSource())); + } + for (CorrelationAttributeInstance corrAttr : corrAttrs) { - if (!corrAttrsAlreadyCreated.add(corrAttr.toString())) { - /* - * This is a bit of a time saver. Uniqueness constraints in the - * central repository prevent creation of duplicate correlation - * attributes, so this saves no-op central repository insert - * attempts. 
- */ + if (!corrAttrsAnalyzed.add(corrAttr.toString())) { continue; } - makeAnalysisResults(osAccount, corrAttr); + if (artifact != null) { + makeAnalysisResults(artifact, corrAttr); + } else { + makeAnalysisResults(osAccount, corrAttr); + } - if (saveCorrAttrs) { + if (saveCorrAttrInstances) { try { centralRepo.addAttributeInstanceBulk(corrAttr); } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, String.format("Error adding correlation attribute '%s' to central repository for OS account '%s' (job ID=%d)", corrAttr, osAccount, ingestJobId), ex); + LOGGER.log(Level.SEVERE, String.format("Error adding correlation attribute '%s' to central repository for (object ID=%d, job ID=%d)", corrAttr, content.getId(), context.getJobId()), ex); //NON-NLS } } } @@ -260,7 +231,7 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo private void makeAnalysisResults(DataArtifact artifact, CorrelationAttributeInstance corrAttr) { List previousOccurrences = null; if (flagNotableItems) { - previousOccurrences = getOccurrencesInOtherCases(corrAttr); + previousOccurrences = getOccurrencesInOtherCases(corrAttr, context.getJobId()); if (!previousOccurrences.isEmpty()) { Set previousCases = new HashSet<>(); for (CorrelationAttributeInstance occurrence : previousOccurrences) { @@ -269,7 +240,7 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo } } if (!previousCases.isEmpty()) { - makePrevNotableAnalysisResult(artifact, previousCases, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue()); + makePrevNotableAnalysisResult(artifact, previousCases, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue(), context.getDataSource().getId(), context.getJobId()); } } } @@ -283,12 +254,12 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo || corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.EMAIL_TYPE_ID || corrAttr.getCorrelationType().getId() == 
CorrelationAttributeInstance.PHONE_TYPE_ID)) { if (previousOccurrences == null) { - previousOccurrences = getOccurrencesInOtherCases(corrAttr); + previousOccurrences = getOccurrencesInOtherCases(corrAttr, context.getJobId()); } if (!previousOccurrences.isEmpty()) { Set previousCases = getPreviousCases(previousOccurrences); if (!previousCases.isEmpty()) { - makePrevSeenAnalysisResult(artifact, previousCases, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue()); + makePrevSeenAnalysisResult(artifact, previousCases, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue(), context.getDataSource().getId(), context.getJobId()); } } } @@ -297,10 +268,10 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo && (corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.INSTALLED_PROGS_TYPE_ID || corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.DOMAIN_TYPE_ID)) { if (previousOccurrences == null) { - previousOccurrences = getOccurrencesInOtherCases(corrAttr); + previousOccurrences = getOccurrencesInOtherCases(corrAttr, context.getJobId()); } if (previousOccurrences.isEmpty()) { - makePrevUnseenAnalysisResult(artifact, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue()); + makePrevUnseenAnalysisResult(artifact, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue(), context.getDataSource().getId(), context.getJobId()); } } } @@ -314,42 +285,16 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo */ private void makeAnalysisResults(OsAccount osAccount, CorrelationAttributeInstance corrAttr) { if (flagPrevSeenDevices) { - List previousOccurrences = getOccurrencesInOtherCases(corrAttr); + List previousOccurrences = getOccurrencesInOtherCases(corrAttr, context.getJobId()); if (!previousOccurrences.isEmpty()) { Set previousCases = getPreviousCases(previousOccurrences); if (!previousCases.isEmpty()) { - makePrevSeenAnalysisResult(osAccount, previousCases, 
corrAttr.getCorrelationType(), corrAttr.getCorrelationValue()); + makePrevSeenAnalysisResult(osAccount, previousCases, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue(), context.getDataSource().getId(), context.getJobId()); } } } } - /** - * Gets any previous occurrences of a given correlation attribute in cases - * other than the current case. - * - * @param corrAttr The correlation attribute. - * - * @return The other occurrences of the correlation attribute. - */ - private List getOccurrencesInOtherCases(CorrelationAttributeInstance corrAttr) { - List previousOccurrences = new ArrayList<>(); - try { - previousOccurrences = centralRepo.getArtifactInstancesByTypeValue(corrAttr.getCorrelationType(), corrAttr.getCorrelationValue()); - for (Iterator iterator = previousOccurrences.iterator(); iterator.hasNext();) { - CorrelationAttributeInstance prevOccurrence = iterator.next(); - if (prevOccurrence.getCorrelationCase().getCaseUUID().equals(corrAttr.getCorrelationCase().getCaseUUID())) { - iterator.remove(); - } - } - } catch (CorrelationAttributeNormalizationException ex) { - LOGGER.log(Level.SEVERE, String.format("Error normalizing correlation attribute value for 's' (job ID=%d)", corrAttr, ingestJobId), ex); // NON-NLS - } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, String.format("Error getting previous occurences of correlation attribute 's' (job ID=%d)", corrAttr, ingestJobId), ex); // NON-NLS - } - return previousOccurrences; - } - /** * Gets a unique set of previous cases, represented by their names, from a * list of previous occurrences of correlation attributes. @@ -366,142 +311,28 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo return previousCases; } - /** - * Makes a previously notable analysis result for a content. - * - * @param content The content. - * @param previousCases The names of the cases in which the artifact was - * deemed notable. 
- * @param corrAttrType The type of the matched correlation attribute. - * @param corrAttrValue The value of the matched correlation attribute. - */ - @NbBundle.Messages({ - "CentralRepoIngestModule_notableSetName=Previously Tagged As Notable (Central Repository)", - "# {0} - list of cases", - "CentralRepoIngestModule_notableJustification=Previously marked as notable in cases {0}" - }) - private void makePrevNotableAnalysisResult(Content content, Set previousCases, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue) { - String prevCases = previousCases.stream().collect(Collectors.joining(",")); - String justification = Bundle.CentralRepoIngestModule_notableJustification(prevCases); - Collection attributes = Arrays.asList(new BlackboardAttribute(TSK_SET_NAME, CentralRepoIngestModuleFactory.getModuleName(), Bundle.CentralRepoIngestModule_notableSetName()), - new BlackboardAttribute(TSK_CORRELATION_TYPE, CentralRepoIngestModuleFactory.getModuleName(), corrAttrType.getDisplayName()), - new BlackboardAttribute(TSK_CORRELATION_VALUE, CentralRepoIngestModuleFactory.getModuleName(), corrAttrValue), - new BlackboardAttribute(TSK_OTHER_CASES, CentralRepoIngestModuleFactory.getModuleName(), prevCases)); - makeAndPostAnalysisResult(content, BlackboardArtifact.Type.TSK_PREVIOUSLY_NOTABLE, attributes, "", Score.SCORE_NOTABLE, justification); - } - - /** - * Makes a previously seen analysis result for a content, unless the content - * is too common. - * - * @param content The content. - * @param previousCases The names of the cases in which the artifact was - * previously seen. - * @param corrAttrType The type of the matched correlation attribute. - * @param corrAttrValue The value of the matched correlation attribute. 
- */ - @NbBundle.Messages({ - "CentralRepoIngestModule_prevSeenSetName=Previously Seen (Central Repository)", - "# {0} - list of cases", - "CentralRepoIngestModule_prevSeenJustification=Previously seen in cases {0}" - }) - private void makePrevSeenAnalysisResult(Content content, Set previousCases, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue) { - Optional score = calculateScore(previousCases.size()); - if (score.isPresent()) { - String prevCases = previousCases.stream().collect(Collectors.joining(",")); - String justification = Bundle.CentralRepoIngestModule_prevSeenJustification(prevCases); - Collection analysisResultAttributes = Arrays.asList( - new BlackboardAttribute(TSK_SET_NAME, CentralRepoIngestModuleFactory.getModuleName(), Bundle.CentralRepoIngestModule_prevSeenSetName()), - new BlackboardAttribute(TSK_CORRELATION_TYPE, CentralRepoIngestModuleFactory.getModuleName(), corrAttrType.getDisplayName()), - new BlackboardAttribute(TSK_CORRELATION_VALUE, CentralRepoIngestModuleFactory.getModuleName(), corrAttrValue), - new BlackboardAttribute(TSK_OTHER_CASES, CentralRepoIngestModuleFactory.getModuleName(), prevCases)); - makeAndPostAnalysisResult(content, BlackboardArtifact.Type.TSK_PREVIOUSLY_SEEN, analysisResultAttributes, "", score.get(), justification); - } - } - - /** - * Makes a previously unseen analysis result for a content. - * - * @param content The content. - * @param corrAttrType The type of the new correlation attribute. - * @param corrAttrValue The value of the new correlation attribute. 
- */ - @NbBundle.Messages({ - "CentralRepoIngestModule_prevUnseenJustification=Previously seen in zero cases" - }) - private void makePrevUnseenAnalysisResult(Content content, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue) { - Collection attributesForNewArtifact = Arrays.asList( - new BlackboardAttribute(TSK_CORRELATION_TYPE, CentralRepoIngestModuleFactory.getModuleName(), corrAttrType.getDisplayName()), - new BlackboardAttribute(TSK_CORRELATION_VALUE, CentralRepoIngestModuleFactory.getModuleName(), corrAttrValue)); - makeAndPostAnalysisResult(content, BlackboardArtifact.Type.TSK_PREVIOUSLY_UNSEEN, attributesForNewArtifact, "", Score.SCORE_LIKELY_NOTABLE, Bundle.CentralRepoIngestModule_prevUnseenJustification()); - } - - /** - * Calculates a score based in a number of previous cases. - * - * @param numPreviousCases The number of previous cases. - * - * @return An Optional of a score, will be empty if there is no score - * because the number of previous cases is too high, indicating a - * common and therefore uninteresting item. - */ - private Optional calculateScore(int numPreviousCases) { - Score score = null; - if (numPreviousCases <= MAX_PREV_CASES_FOR_NOTABLE_SCORE) { - score = Score.SCORE_LIKELY_NOTABLE; - } else if (numPreviousCases > MAX_PREV_CASES_FOR_NOTABLE_SCORE && numPreviousCases <= MAX_PREV_CASES_FOR_PREV_SEEN) { - score = Score.SCORE_NONE; - } - return Optional.ofNullable(score); - } - - /** - * Makes a new analysis result of a given type for a content and posts it to - * the blackboard. - * - * @param content The content. - * @param analysisResultType The type of analysis result to make. - * @param analysisResultAttrs The attributes of the new analysis result. - * @param configuration The configuration for the new analysis result. - * @param score The score for the new analysis result. - * @param justification The justification for the new analysis result. 
- */ - private void makeAndPostAnalysisResult(Content content, BlackboardArtifact.Type analysisResultType, Collection analysisResultAttrs, String configuration, Score score, String justification) { - try { - if (!blackboard.artifactExists(content, analysisResultType, analysisResultAttrs)) { - AnalysisResult analysisResult = content.newAnalysisResult(analysisResultType, score, null, configuration, justification, analysisResultAttrs).getAnalysisResult(); - try { - blackboard.postArtifact(analysisResult, CentralRepoIngestModuleFactory.getModuleName(), ingestJobId); - } catch (Blackboard.BlackboardException ex) { - LOGGER.log(Level.SEVERE, String.format("Error posting analysis result '%s' to blackboard for content 's' (job ID=%d)", analysisResult, content, ingestJobId), ex); //NON-NLS - } - } - } catch (TskCoreException ex) { - LOGGER.log(Level.SEVERE, String.format("Error creating %s analysis result for content '%s' (job ID=%d)", analysisResultType, content, ingestJobId), ex); // NON-NLS - } - } - /** * Ensures the data source in the central repository has hash values that * match those in the case database. 
*/ private void syncDataSourceHashes() { - if (!(dataSource instanceof Image)) { + if (!(context.getDataSource() instanceof Image)) { return; } try { + Case currentCase = Case.getCurrentCaseThrows(); CorrelationCase correlationCase = centralRepo.getCase(currentCase); if (correlationCase == null) { correlationCase = centralRepo.newCase(currentCase); } - CorrelationDataSource correlationDataSource = centralRepo.getDataSource(correlationCase, dataSource.getId()); + CorrelationDataSource correlationDataSource = centralRepo.getDataSource(correlationCase, context.getDataSource().getId()); if (correlationDataSource == null) { - correlationDataSource = CorrelationDataSource.fromTSKDataSource(correlationCase, dataSource); + correlationDataSource = CorrelationDataSource.fromTSKDataSource(correlationCase, context.getDataSource()); } - Image image = (Image) dataSource; + Image image = (Image) context.getDataSource(); String imageMd5Hash = image.getMd5(); if (imageMd5Hash == null) { imageMd5Hash = ""; @@ -530,9 +361,9 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo } } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, String.format("Error fetching data from the central repository for data source '%s' (job ID=%d)", dataSource.getName(), ingestJobId), ex); - } catch (TskCoreException ex) { - LOGGER.log(Level.SEVERE, String.format("Error fetching data from the case database for data source '%s' (job ID=%d)", dataSource.getName(), ingestJobId), ex); + LOGGER.log(Level.SEVERE, String.format("Error fetching data from the central repository for data source '%s' (job ID=%d)", context.getDataSource().getName(), context.getJobId()), ex); + } catch (NoCurrentCaseException | TskCoreException ex) { + LOGGER.log(Level.SEVERE, String.format("Error fetching data from the case database for data source '%s' (job ID=%d)", context.getDataSource().getName(), context.getJobId()), ex); } } diff --git 
a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java index a6a8e54d62..8599f2e8c8 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java @@ -18,11 +18,10 @@ */ package org.sleuthkit.autopsy.centralrepository.ingestmodule; -import java.util.Arrays; -import java.util.Collection; +import java.util.HashSet; import java.util.List; +import java.util.Set; import java.util.logging.Level; -import java.util.stream.Collectors; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; @@ -37,133 +36,38 @@ import org.sleuthkit.autopsy.healthmonitor.HealthMonitor; import org.sleuthkit.autopsy.healthmonitor.TimingMetric; import org.sleuthkit.autopsy.ingest.FileIngestModule; import org.sleuthkit.autopsy.ingest.IngestJobContext; -import org.sleuthkit.autopsy.ingest.IngestMessage; import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter; -import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.Blackboard; -import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardAttribute; -import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME; -import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_TYPE; -import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_VALUE; -import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_OTHER_CASES; import org.sleuthkit.datamodel.HashUtility; -import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; import 
org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository; -import org.sleuthkit.datamodel.Score; +import static org.sleuthkit.autopsy.centralrepository.ingestmodule.CentralRepoIngestModuleUtils.makePrevNotableAnalysisResult; /** * A file ingest module that adds correlation attributes for files to the - * central repository and makes previously notable analysis results based on - * previous occurences. + * central repository, and makes previously notable analysis results for files + * marked as notable in other cases. */ -@Messages({"CentralRepoIngestModule.prevTaggedSet.text=Previously Tagged As Notable (Central Repository)", - "CentralRepoIngestModule.prevCaseComment.text=Previous Case: "}) final class CentralRepoFileIngestModule implements FileIngestModule { private static final Logger logger = Logger.getLogger(CentralRepoFileIngestModule.class.getName()); - private static final String MODULE_NAME = CentralRepoIngestModuleFactory.getModuleName(); - private final IngestServices services = IngestServices.getInstance(); private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter(); - private long jobId; - private CorrelationCase centralRepoCase; - private CorrelationDataSource centralRepoDataSource; + private final boolean flagNotableItems; + private final boolean saveCorrAttrInstances; + private IngestJobContext context; + private CentralRepository centralRepo; private CorrelationAttributeInstance.Type filesType; - private final boolean flagTaggedNotableItems; - private Blackboard blackboard; - private final boolean createCorrelationProperties; - private CentralRepository centralRepoDb; /** * Constructs a file ingest module that adds correlation attributes for - * files to the central repository and makes previously notable analysis - * results based on previous occurences. + * files to the central repository, and makes previously notable analysis + * results for files marked as notable in other cases. 
* * @param settings The ingest job settings. */ CentralRepoFileIngestModule(IngestSettings settings) { - flagTaggedNotableItems = settings.isFlagTaggedNotableItems(); - createCorrelationProperties = settings.shouldCreateCorrelationProperties(); - } - - @Override - public ProcessResult process(AbstractFile abstractFile) { - - if (!CorrelationAttributeUtil.isSupportedAbstractFileType(abstractFile)) { - return ProcessResult.OK; - } - - if (abstractFile.getKnown() == TskData.FileKnown.KNOWN) { - return ProcessResult.OK; - } - - if (!filesType.isEnabled()) { - return ProcessResult.OK; - } - - // get the hash because we're going to correlate it - String md5 = abstractFile.getMd5Hash(); - if ((md5 == null) || (HashUtility.isNoDataMd5(md5))) { - return ProcessResult.OK; - } - - /* - * Search the central repo to see if this file was previously marked as - * being bad. Create artifact if it was. - */ - if (abstractFile.getKnown() != TskData.FileKnown.KNOWN && flagTaggedNotableItems) { - try { - TimingMetric timingMetric = HealthMonitor.getTimingMetric("Central Repository: Notable artifact query"); - List caseDisplayNamesList = centralRepoDb.getListCasesHavingArtifactInstancesKnownBad(filesType, md5); - HealthMonitor.submitTimingMetric(timingMetric); - if (!caseDisplayNamesList.isEmpty()) { - postCorrelatedBadFileToBlackboard(abstractFile, caseDisplayNamesList, filesType, md5); - } - } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error searching database for artifact.", ex); // NON-NLS - return ProcessResult.ERROR; - } catch (CorrelationAttributeNormalizationException ex) { - logger.log(Level.INFO, "Error searching database for artifact.", ex); // NON-NLS - return ProcessResult.ERROR; - } - } - - // insert this file into the central repository - if (createCorrelationProperties) { - try { - CorrelationAttributeInstance cefi = new CorrelationAttributeInstance( - filesType, - md5, - centralRepoCase, - centralRepoDataSource, - abstractFile.getParentPath() + 
abstractFile.getName(), - null, - TskData.FileKnown.UNKNOWN // NOTE: Known status in the CR is based on tagging, not hashes like the Case Database. - , - abstractFile.getId()); - centralRepoDb.addAttributeInstanceBulk(cefi); - } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error adding artifact to bulk artifacts.", ex); // NON-NLS - return ProcessResult.ERROR; - } catch (CorrelationAttributeNormalizationException ex) { - logger.log(Level.INFO, "Error adding artifact to bulk artifacts.", ex); // NON-NLS - return ProcessResult.ERROR; - } - } - return ProcessResult.OK; - } - - @Override - public void shutDown() { - if (refCounter.decrementAndGet(jobId) == 0) { - try { - centralRepoDb.commitAttributeInstancesBulk(); - } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error committing bulk insert of correlation attributes", ex); // NON-NLS - } - } + flagNotableItems = settings.isFlagTaggedNotableItems(); + saveCorrAttrInstances = settings.shouldCreateCorrelationProperties(); } @Messages({ @@ -173,127 +77,126 @@ final class CentralRepoFileIngestModule implements FileIngestModule { }) @Override public void startUp(IngestJobContext context) throws IngestModuleException { - jobId = context.getJobId(); + this.context = context; - /* - * IMPORTANT: Start up IngestModuleException messages are displayed to - * the user, if a user is present. Therefore, an exception to the policy - * that exception messages are not localized is appropriate here. Also, - * the exception messages should be user-friendly. 
- */ if (!CentralRepository.isEnabled()) { - throw new IngestModuleException(Bundle.CrDataArtifactIngestModule_crNotEnabledErrMsg()); + throw new IngestModuleException(Bundle.CentralRepoIngestModule_crNotEnabledErrMsg()); } - Case autopsyCase; try { - autopsyCase = Case.getCurrentCaseThrows(); - } catch (NoCurrentCaseException ex) { - throw new IngestModuleException(Bundle.CrDataArtifactIngestModule_noCurrentCaseErrMsg(), ex); - } - - blackboard = autopsyCase.getSleuthkitCase().getBlackboard(); - - try { - centralRepoDb = CentralRepository.getInstance(); + centralRepo = CentralRepository.getInstance(); } catch (CentralRepoException ex) { throw new IngestModuleException(Bundle.CentralRepoIngestModule_crInaccessibleErrMsg(), ex); } + /* + * Make sure the correlation attribute type definition is in the central + * repository. Currently (11/8/21) it is cached, but there is no harm in + * saving it here for use in process(). + */ try { - filesType = centralRepoDb.getCorrelationTypeById(CorrelationAttributeInstance.FILES_TYPE_ID); + filesType = centralRepo.getCorrelationTypeById(CorrelationAttributeInstance.FILES_TYPE_ID); } catch (CentralRepoException ex) { throw new IngestModuleException(Bundle.CentralRepoIngestModule_missingFileCorrAttrTypeErrMsg(), ex); } - try { - centralRepoCase = centralRepoDb.getCase(autopsyCase); - } catch (CentralRepoException ex) { - throw new IngestModuleException(Bundle.CentralRepoIngestModule_cannotGetCrCaseErrMsg(), ex); - } - - try { - centralRepoDataSource = CorrelationDataSource.fromTSKDataSource(centralRepoCase, context.getDataSource()); - } catch (CentralRepoException ex) { - throw new IngestModuleException(Bundle.CentralRepoIngestModule_cannotGetCrDataSourceErrMsg(), ex); - } - - refCounter.incrementAndGet(jobId); - } - - /** - * Post a new "previously seen" artifact for the file marked bad. - * - * @param abstractFile The file from which to create an artifact. 
- * @param caseDisplayNames Case names to be added to a TSK_COMMON attribute. - */ - private void postCorrelatedBadFileToBlackboard(AbstractFile abstractFile, List caseDisplayNames, CorrelationAttributeInstance.Type corrAtrrType, String corrAttrValue) { - String prevCases = caseDisplayNames.stream().distinct().collect(Collectors.joining(",")); - String justification = "Previously marked as notable in cases " + prevCases; - Collection attributes = Arrays.asList(new BlackboardAttribute( - TSK_SET_NAME, MODULE_NAME, - Bundle.CentralRepoIngestModule_prevTaggedSet_text()), - new BlackboardAttribute( - TSK_CORRELATION_TYPE, MODULE_NAME, - corrAtrrType.getDisplayName()), - new BlackboardAttribute( - TSK_CORRELATION_VALUE, MODULE_NAME, - corrAttrValue), - new BlackboardAttribute( - TSK_OTHER_CASES, MODULE_NAME, - prevCases)); - try { - // Create artifact if it doesn't already exist. - if (!blackboard.artifactExists(abstractFile, BlackboardArtifact.Type.TSK_PREVIOUSLY_NOTABLE, attributes)) { - BlackboardArtifact tifArtifact = abstractFile.newAnalysisResult( - BlackboardArtifact.Type.TSK_PREVIOUSLY_NOTABLE, Score.SCORE_NOTABLE, - null, Bundle.CentralRepoIngestModule_prevTaggedSet_text(), justification, attributes) - .getAnalysisResult(); - try { - blackboard.postArtifact(tifArtifact, MODULE_NAME, jobId); - } catch (Blackboard.BlackboardException ex) { - logger.log(Level.SEVERE, "Unable to index blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS - } - // send inbox message - sendBadFileInboxMessage(tifArtifact, abstractFile.getName(), abstractFile.getMd5Hash(), caseDisplayNames); + /* + * The first module instance started for this job makes sure the current + * case and data source are in the central repository. Currently + * (11/8/21), these are cached upon creation / first retreival. 
+ */ + if (refCounter.incrementAndGet(context.getJobId()) == 1) { + Case currentCase; + try { + currentCase = Case.getCurrentCaseThrows(); + } catch (NoCurrentCaseException ex) { + throw new IngestModuleException(Bundle.CentralRepoIngestModule_noCurrentCaseErrMsg(), ex); + } + + CorrelationCase centralRepoCase; + try { + centralRepoCase = centralRepo.getCase(currentCase); + } catch (CentralRepoException ex) { + throw new IngestModuleException(Bundle.CentralRepoIngestModule_cannotGetCrCaseErrMsg(), ex); + } + + try { + CorrelationDataSource.fromTSKDataSource(centralRepoCase, context.getDataSource()); + } catch (CentralRepoException ex) { + throw new IngestModuleException(Bundle.CentralRepoIngestModule_cannotGetCrDataSourceErrMsg(), ex); } - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Failed to create BlackboardArtifact.", ex); // NON-NLS - } catch (IllegalStateException ex) { - logger.log(Level.SEVERE, "Failed to create BlackboardAttribute.", ex); // NON-NLS } } - /** - * Post a message to the ingest inbox alerting the user that a bad file was - * found. - * - * @param artifact badFile Blackboard Artifact - * @param name badFile's name - * @param md5Hash badFile's md5 hash - * @param caseDisplayNames List of cases that the artifact appears in. - */ - @Messages({ - "CentralRepoIngestModule_notable_message_header=A file in this data source was previously seen and tagged as Notable.
", - "CentralRepoIngestModel_name_header=Name:
", - "CentralRepoIngestModel_previous_case_header=
Previous Cases:
", - "# {0} - Name of file that is Notable", - "CentralRepoIngestModule_postToBB_knownBadMsg=Notable: {0}" - }) - private void sendBadFileInboxMessage(BlackboardArtifact artifact, String name, String md5Hash, List caseDisplayNames) { - StringBuilder detailsSb = new StringBuilder(1024); - - detailsSb.append(Bundle.CentralRepoIngestModule_notable_message_header()).append(Bundle.CentralRepoIngestModel_name_header()); - detailsSb.append(name).append(Bundle.CentralRepoIngestModel_previous_case_header()); - for (String str : caseDisplayNames) { - detailsSb.append(str).append("
"); + @Override + public ProcessResult process(AbstractFile abstractFile) { + if (!flagNotableItems && !saveCorrAttrInstances) { + return ProcessResult.OK; + } + + if (!filesType.isEnabled()) { + return ProcessResult.OK; + } + + if (abstractFile.getKnown() == TskData.FileKnown.KNOWN) { + return ProcessResult.OK; + } + + if (!CorrelationAttributeUtil.isSupportedAbstractFileType(abstractFile)) { + return ProcessResult.OK; + } + + /* + * The correlation attribute value for a file is its MD5 hash. This + * module cannot do anything with a file if the hash calculation has not + * been done, but the decision has been made to not do a hash + * calculation here if the file hashing and lookup module is not in this + * pipeline ahead of this module (affirmed per BC, 11/8/21). + */ + String md5 = abstractFile.getMd5Hash(); + if ((md5 == null) || (HashUtility.isNoDataMd5(md5))) { + return ProcessResult.OK; + } + + if (flagNotableItems) { + try { + TimingMetric timingMetric = HealthMonitor.getTimingMetric("Central Repository: Notable artifact query"); + Set otherCases = new HashSet<>(); + otherCases.addAll(centralRepo.getListCasesHavingArtifactInstancesKnownBad(filesType, md5)); + HealthMonitor.submitTimingMetric(timingMetric); + if (!otherCases.isEmpty()) { + makePrevNotableAnalysisResult(abstractFile, otherCases, filesType, md5, context.getDataSource().getId(), context.getJobId()); + } + } catch (CentralRepoException ex) { + logger.log(Level.SEVERE, "Error searching database for artifact.", ex); // NON-NLS + } catch (CorrelationAttributeNormalizationException ex) { + logger.log(Level.INFO, "Error searching database for artifact.", ex); // NON-NLS + } + } + + if (saveCorrAttrInstances) { + List corrAttrs = CorrelationAttributeUtil.makeCorrAttrsToSave(abstractFile); + for (CorrelationAttributeInstance corrAttr : corrAttrs) { + try { + centralRepo.addAttributeInstanceBulk(corrAttr); + } catch (CentralRepoException ex) { + logger.log(Level.SEVERE, "Error adding artifact to bulk 
artifacts.", ex); // NON-NLS + } + } + } + + return ProcessResult.OK; + } + + @Override + public void shutDown() { + if (refCounter.decrementAndGet(context.getJobId()) == 0) { + try { + centralRepo.commitAttributeInstancesBulk(); + } catch (CentralRepoException ex) { + logger.log(Level.SEVERE, String.format("Error committing bulk insert of correlation attributes (job ID=%d)", context.getJobId()), ex); // NON-NLS + } } - detailsSb.append(""); - services.postMessage(IngestMessage.createDataMessage(MODULE_NAME, - Bundle.CentralRepoIngestModule_postToBB_knownBadMsg(name), - detailsSb.toString(), - name + md5Hash, - artifact)); } } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/FileIngestModule.java b/Core/src/org/sleuthkit/autopsy/ingest/FileIngestModule.java index f13a614e54..6fb9c352da 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/FileIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/FileIngestModule.java @@ -33,10 +33,11 @@ public interface FileIngestModule extends IngestModule { * IMPORTANT: In addition to returning ProcessResult.OK or * ProcessResult.ERROR, modules should log all errors using methods provided * by the org.sleuthkit.autopsy.coreutils.Logger class. Log messages should - * include the name and object ID of the data being processed. If an - * exception has been caught by the module, the exception should be sent to - * the Logger along with the log message so that a stack trace will appear - * in the application log. + * include the name and object ID of the data being processed and any other + * information that would be useful for debugging. If an exception has been + * caught by the module, the exception should be sent to the logger along + * with the log message so that a stack trace will appear in the application + * log. * * @param file The file to analyze. 
* diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModule.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModule.java index ec83a129c1..6f53ad0f52 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestModule.java @@ -60,6 +60,11 @@ public interface IngestModule { * must also be taken into consideration when sharing resources between * module instances. See IngestModuleReferenceCounter. * + * IMPORTANT: Start up IngestModuleException messages are displayed to the + * user, if a user is present. Therefore, an exception to the policy that + * exception messages are not localized is appropriate in this method. Also, + * the exception messages should be user-friendly. + * * @param context Provides data and services specific to the ingest job and * the ingest pipeline of which the module is a part. * From 3afddca8d4db54b002a5719ca2c9cfb172ac7ee5 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Wed, 10 Nov 2021 13:33:46 -0500 Subject: [PATCH 018/142] event batching --- .../corecomponents/DataResultPanel.java | 26 +++- .../autopsy/mainui/datamodel/AbstractDao.java | 5 +- .../mainui/datamodel/AnalysisResultDAO.java | 19 +-- .../datamodel/BlackboardArtifactEvent.java | 71 ++++++++++ .../mainui/datamodel/DAOAggregateEvent.java | 19 ++- .../autopsy/mainui/datamodel/DAOEvent.java | 26 ++++ .../mainui/datamodel/DAOEventBatcher.java | 91 ++++++++++++ .../mainui/datamodel/DataArtifactDAO.java | 75 +++++++--- .../mainui/datamodel/DataArtifactEvent.java | 30 ++++ .../autopsy/mainui/datamodel/MainDAO.java | 82 ++++++----- .../mainui/datamodel/OsAccountsDAO.java | 17 ++- .../autopsy/mainui/datamodel/TagsDAO.java | 132 +++++++++--------- .../autopsy/mainui/datamodel/ViewsDAO.java | 49 ++++--- .../autopsy/mainui/nodes/DAOFetcher.java | 3 +- .../autopsy/mainui/nodes/SearchManager.java | 10 +- 15 files changed, 475 insertions(+), 180 deletions(-) create mode 100644 
Core/src/org/sleuthkit/autopsy/mainui/datamodel/BlackboardArtifactEvent.java create mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOEvent.java create mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOEventBatcher.java create mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactEvent.java diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java index eed210851b..3a301bb644 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java @@ -68,6 +68,9 @@ import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultDAO.AnalysisResultSe import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultDAO.KeywordHitResultFetcher; import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSetSearchParam; +import org.sleuthkit.autopsy.mainui.datamodel.DAOAggregateEvent; +import org.sleuthkit.autopsy.mainui.datamodel.DAOEvent; +import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactDAO; import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactDAO.DataArtifactFetcher; import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.FileTypeExtensionsSearchParams; @@ -75,6 +78,7 @@ import org.sleuthkit.autopsy.mainui.datamodel.FileTypeMimeSearchParams; import org.sleuthkit.autopsy.mainui.datamodel.FileTypeSizeSearchParams; import org.sleuthkit.autopsy.mainui.datamodel.HashHitSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.KeywordHitSearchParam; +import org.sleuthkit.autopsy.mainui.datamodel.MainDAO; import org.sleuthkit.autopsy.mainui.nodes.SearchResultRootNode; import org.sleuthkit.autopsy.mainui.datamodel.SearchResultsDTO; import org.sleuthkit.autopsy.mainui.datamodel.TagsDAO.TagFetcher; @@ -172,13 +176,19 @@ public 
class DataResultPanel extends javax.swing.JPanel implements DataResult, C IngestManager.IngestModuleEvent.CONTENT_CHANGED, IngestManager.IngestModuleEvent.DATA_ADDED); - private final PropertyChangeListener ingestModuleListener = evt -> { - if (this.searchResultManager != null && this.searchResultManager.isRefreshRequired(evt)) { - refreshSearchResultChildren(); + private final MainDAO mainDAO = MainDAO.getInstance(); + + private final PropertyChangeListener DAOListener = evt -> { + SearchManager manager = this.searchResultManager; + if (manager != null && evt != null && evt.getNewValue() instanceof DAOAggregateEvent) { + DAOAggregateEvent daoAggrEvt = (DAOAggregateEvent) evt.getNewValue(); + if (daoAggrEvt.getEvents().stream().anyMatch((daoEvt) -> manager.isRefreshRequired(daoEvt))) { + refreshSearchResultChildren(); + } } }; - private final PropertyChangeListener weakIngestModuleListener = WeakListeners.propertyChange(ingestModuleListener, null); + private final PropertyChangeListener weakDAOListener = WeakListeners.propertyChange(DAOListener, mainDAO); private static final Set INGEST_JOB_EVENTS = EnumSet.of( IngestManager.IngestJobEvent.COMPLETED, @@ -457,7 +467,8 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C private void initListeners() { UserPreferences.addChangeListener(this.pageSizeListener); Case.addEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, this.weakCaseEventListener); - IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS, this.weakIngestModuleListener); + this.mainDAO.addPropertyChangeListener(this.weakDAOListener); + IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS, this.weakDAOListener); IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS, weakIngestJobListener); } @@ -467,7 +478,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C private void closeListeners() { 
UserPreferences.removeChangeListener(this.pageSizeListener); Case.removeEventTypeSubscriber(EnumSet.of(Case.Events.CURRENT_CASE), this.weakCaseEventListener); - IngestManager.getInstance().removeIngestModuleEventListener(INGEST_MODULE_EVENTS, this.weakIngestModuleListener); + this.mainDAO.removePropertyChangeListener(this.weakDAOListener); IngestManager.getInstance().removeIngestJobEventListener(INGEST_JOB_EVENTS, weakIngestJobListener); } @@ -1171,7 +1182,8 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C */ void displayDataArtifact(DataArtifactSearchParam dataArtifactParams) { try { - this.searchResultManager = new SearchManager(new DataArtifactFetcher(dataArtifactParams), getPageSize()); + DataArtifactDAO dataArtDAO = MainDAO.getInstance().getDataArtifactsDAO(); + this.searchResultManager = new SearchManager(dataArtDAO.new DataArtifactFetcher(dataArtifactParams), getPageSize()); SearchResultsDTO results = searchResultManager.getResults(); displaySearchResults(results, true); } catch (ExecutionException ex) { diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDao.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDao.java index 39d7127b9b..e18a2bc644 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDao.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDao.java @@ -19,6 +19,7 @@ package org.sleuthkit.autopsy.mainui.datamodel; import java.beans.PropertyChangeEvent; +import java.util.Collection; import java.util.List; /** @@ -34,10 +35,10 @@ abstract class AbstractDAO { /** * Handles an autopsy event (i.e. ingest, case, etc.). * - * @param evt The autopsy event. + * @param evt The autopsy events. * * @return The list of dao events emitted due to this autopsy event. 
*/ - abstract List handleAutopsyEvent(PropertyChangeEvent evt); + abstract List handleAutopsyEvent(Collection evt); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java index 51848a6935..f259f0b7e4 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java @@ -24,6 +24,7 @@ import java.beans.PropertyChangeEvent; import java.sql.SQLException; import java.text.MessageFormat; import java.util.ArrayList; +import java.util.Collection; import java.util.Comparator; import java.util.HashMap; import java.util.List; @@ -690,7 +691,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } @Override - List handleAutopsyEvent(PropertyChangeEvent evt) { + List handleAutopsyEvent(Collection evt) { throw new UnsupportedOperationException("Not supported yet."); } @@ -709,13 +710,15 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } @Override - public boolean isRefreshRequired(PropertyChangeEvent evt) { - ModuleDataEvent dataEvent = DAOEventUtils.getModuleDataFromEvt(evt); - if (dataEvent == null) { - return false; - } - - return MainDAO.getInstance().getAnalysisResultDAO().isAnalysisResultsInvalidating(this.getParameters(), dataEvent); + public boolean isRefreshRequired(DAOEvent evt) { + return true; + // GVDTODO +// ModuleDataEvent dataEvent = DAOEventUtils.getModuleDataFromEvt(evt); +// if (dataEvent == null) { +// return false; +// } +// +// return MainDAO.getInstance().getAnalysisResultDAO().isAnalysisResultsInvalidating(this.getParameters(), dataEvent); } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BlackboardArtifactEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BlackboardArtifactEvent.java new file mode 100644 index 0000000000..7603b8f761 --- /dev/null +++ 
b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BlackboardArtifactEvent.java @@ -0,0 +1,71 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel; + +/** + * An event for an artifact added in a particular type. + */ +public class BlackboardArtifactEvent implements DAOEvent { + private final long artifactTypeId; + private final long dataSourceId; + + public BlackboardArtifactEvent(long artifactTypeId, long dataSourceId) { + this.artifactTypeId = artifactTypeId; + this.dataSourceId = dataSourceId; + } + + public long getArtifactTypeId() { + return artifactTypeId; + } + + public long getDataSourceId() { + return dataSourceId; + } + + @Override + public int hashCode() { + int hash = 5; + hash = 67 * hash + (int) (this.artifactTypeId ^ (this.artifactTypeId >>> 32)); + hash = 67 * hash + (int) (this.dataSourceId ^ (this.dataSourceId >>> 32)); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final BlackboardArtifactEvent other = (BlackboardArtifactEvent) obj; + if (this.artifactTypeId != other.artifactTypeId) { + return false; + } + if (this.dataSourceId != other.dataSourceId) { + return false; + } + return true; + } + + +} diff --git 
a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOAggregateEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOAggregateEvent.java index ebc446206b..78ed63ed05 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOAggregateEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOAggregateEvent.java @@ -18,20 +18,29 @@ */ package org.sleuthkit.autopsy.mainui.datamodel; -import java.util.Set; +import java.util.List; +import org.apache.commons.collections4.list.UnmodifiableList; /** * A single event containing an aggregate of all affected data. */ public class DAOAggregateEvent { - private final Set objects; + private final List objects; - public DAOAggregateEvent(Set objects) { - this.objects = objects; + /** + * Main constructor. + * + * @param objects The list of events in this aggregate event. + */ + DAOAggregateEvent(List objects) { + this.objects = UnmodifiableList.unmodifiableList(objects); } - public Set getEvents() { + /** + * @return The list of events in this aggregate event. + */ + public List getEvents() { return objects; } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOEvent.java new file mode 100644 index 0000000000..7a63cb70af --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOEvent.java @@ -0,0 +1,26 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel; + +/** + * An event emitted by the DAO. + */ +public interface DAOEvent { + +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOEventBatcher.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOEventBatcher.java new file mode 100644 index 0000000000..3f98b8c08a --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOEventBatcher.java @@ -0,0 +1,91 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel; + +import com.google.common.util.concurrent.ThreadFactoryBuilder; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.ScheduledThreadPoolExecutor; +import java.util.concurrent.TimeUnit; + +/** + * + * Handles refreshes in DAOs based on incoming events handling throttles + */ +class DAOEventBatcher { + + /** + * The Refresher interface needs to be implemented by ChildFactory instances + * that wish to take advantage of throttled refresh functionality. + */ + public interface BatchedEventsHandler { + + /** + * Handles a list of aggregated events. + * + * @param events The events to handle. 
+ */ + void handle(Collection events); + } + + private final ScheduledThreadPoolExecutor refreshExecutor + = new ScheduledThreadPoolExecutor(1, + new ThreadFactoryBuilder().setNameFormat(DAOEventBatcher.class.getName()).build()); + + private List aggregateEvents = new ArrayList<>(); + private Object eventListLock = new Object(); + private boolean isRunning = false; + + private final BatchedEventsHandler eventsHandler; + private final long batchMillis; + + public DAOEventBatcher(BatchedEventsHandler eventsHandler, long batchMillis) { + this.eventsHandler = eventsHandler; + this.batchMillis = batchMillis; + } + + /** + * Queues an event to be fired as a part of a time-windowed batch. + * @param event The event. + */ + void queueEvent(T event) { + synchronized (this.eventListLock) { + this.aggregateEvents.add(event); + if (!this.isRunning) { + refreshExecutor.schedule(() -> fireEvents(), this.batchMillis, TimeUnit.MILLISECONDS); + this.isRunning = true; + } + } + } + + /** + * Fires all events and clears batch. 
+ */ + private void fireEvents() { + Collection evtsToFire; + synchronized (this.eventListLock) { + evtsToFire = this.aggregateEvents; + this.aggregateEvents = new ArrayList<>(); + this.isRunning = false; + } + + this.eventsHandler.handle(evtsToFire); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index 70b00aa8d7..00cb613384 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -23,9 +23,14 @@ import com.google.common.cache.CacheBuilder; import java.beans.PropertyChangeEvent; import java.text.MessageFormat; import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; import java.util.Comparator; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Set; import java.util.stream.Collectors; import org.sleuthkit.autopsy.coreutils.Logger; @@ -67,19 +72,19 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { private final Cache, DataArtifactTableSearchResultsDTO> dataArtifactCache = CacheBuilder.newBuilder().maximumSize(1000).build(); private DataArtifactTableSearchResultsDTO fetchDataArtifactsForTable(SearchParams cacheKey) throws NoCurrentCaseException, TskCoreException { - + SleuthkitCase skCase = getCase(); Blackboard blackboard = skCase.getBlackboard(); BlackboardArtifact.Type artType = cacheKey.getParamData().getArtifactType(); String pagedWhereClause = getWhereClause(cacheKey); - + List arts = new ArrayList<>(); arts.addAll(blackboard.getDataArtifactsWhere(pagedWhereClause)); blackboard.loadBlackboardAttributes(arts); - - long totalResultsCount = getTotalResultsCount(cacheKey, arts.size()); - + + long totalResultsCount = getTotalResultsCount(cacheKey, arts.size()); + TableData tableData = createTableData(artType, arts); 
return new DataArtifactTableSearchResultsDTO(artType, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), totalResultsCount); } @@ -110,8 +115,14 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { return dataArtifactCache.get(searchParams, () -> fetchDataArtifactsForTable(searchParams)); } - public boolean isDataArtifactInvalidating(DataArtifactSearchParam key, ModuleDataEvent eventData) { - return key.getArtifactType().equals(eventData.getBlackboardArtifactType()); + public boolean isDataArtifactInvalidating(DataArtifactSearchParam key, DAOEvent eventData) { + if (!(eventData instanceof DataArtifactEvent)) { + return false; + } else { + DataArtifactEvent dataArtEvt = (DataArtifactEvent) eventData; + return key.getArtifactType().getTypeID() == dataArtEvt.getArtifactTypeId() + && (key.getDataSourceId() == null || (key.getDataSourceId() == dataArtEvt.getDataSourceId())); + } } public void dropDataArtifactCache() { @@ -160,14 +171,49 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { } @Override - List handleAutopsyEvent(PropertyChangeEvent evt) { - ModuleDataEvent dataEvt = DAOEventUtils.getModuleDataFromEvt(evt); + List handleAutopsyEvent(Collection evts) { + // get a grouping of artifacts mapping the artifact type id to data source id. 
+ Map> artifactTypeDataSourceMap = new HashMap<>(); + evts.stream() + .map(evt -> DAOEventUtils.getModuleDataFromEvt(evt)) + .filter(dataEvt -> dataEvt != null) + .flatMap(dataEvt -> dataEvt.getArtifacts().stream()) + .forEach((art) -> { + artifactTypeDataSourceMap + .computeIfAbsent(art.getArtifactTypeID(), (k) -> new HashSet<>()) + .add(art.getDataSourceObjId()); + }); + + // invalidate cache entries that are affected by events + // GVDTODO handle concurrency issues that may arise + List> invalidatedKeys = new ArrayList<>(); + for (SearchParams searchParams : this.dataArtifactCache.asMap().keySet()) { + Set dsIds = artifactTypeDataSourceMap.get(searchParams.getParamData().getArtifactType().getTypeID()); + if (dsIds != null) { + Long searchDsId = searchParams.getParamData().getDataSourceId(); + if (searchDsId == null || dsIds.contains(searchDsId)) { + invalidatedKeys.add(searchParams); + } + } + } + this.dataArtifactCache.invalidateAll(invalidatedKeys); + + // gather dao events based on artifacts + List toRet = new ArrayList<>(); + for (Entry> entry : artifactTypeDataSourceMap.entrySet()) { + int artTypeId = entry.getKey(); + for (Long dsObjId : entry.getValue()) { + toRet.add(new DataArtifactEvent(artTypeId, dsObjId)); + } + } + + return toRet; } /* * Handles fetching and paging of data artifacts. */ - public static class DataArtifactFetcher extends DAOFetcher { + public class DataArtifactFetcher extends DAOFetcher { /** * Main constructor. 
@@ -184,13 +230,8 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { } @Override - public boolean isRefreshRequired(PropertyChangeEvent evt) { - ModuleDataEvent dataEvent = DAOEventUtils.getModuleDataFromEvt(evt); - if (dataEvent == null) { - return false; - } - - return MainDAO.getInstance().getDataArtifactsDAO().isDataArtifactInvalidating(this.getParameters(), dataEvent); + public boolean isRefreshRequired(DAOEvent evt) { + return DataArtifactDAO.this.isDataArtifactInvalidating(this.getParameters(), evt); } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactEvent.java new file mode 100644 index 0000000000..d52a8de18f --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactEvent.java @@ -0,0 +1,30 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel; + +/** + * An event for an artifact added or changed of a particular type possibly for a + * particular data source. 
+ */ +public class DataArtifactEvent extends BlackboardArtifactEvent { + + public DataArtifactEvent(long artifactTypeId, long dataSourceId) { + super(artifactTypeId, dataSourceId); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java index e4f5fe64d4..f4d5f9f31f 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java @@ -22,6 +22,7 @@ import com.google.common.collect.ImmutableList; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.beans.PropertyChangeSupport; +import java.util.Collection; import java.util.EnumSet; import java.util.HashSet; import java.util.List; @@ -40,6 +41,10 @@ import org.sleuthkit.autopsy.ingest.IngestManager; */ public class MainDAO extends AbstractDAO { + private static final Set INGEST_MODULE_EVENTS = EnumSet.of(IngestManager.IngestModuleEvent.CONTENT_CHANGED, IngestManager.IngestModuleEvent.DATA_ADDED); + private static final Set CASE_EVENTS = EnumSet.of(Case.Events.CURRENT_CASE); + private static final long MILLIS_BATCH = 5000; + private static MainDAO instance = null; public synchronized static MainDAO getInstance() { @@ -50,12 +55,43 @@ public class MainDAO extends AbstractDAO { return instance; } + /** + * The case event listener. + */ + private final PropertyChangeListener caseEventListener = (evt) -> { + if (evt.getPropertyName().equals(Case.Events.CURRENT_CASE.toString())) { + this.clearCaches(); + } else { + processAndFireDAOEvent(evt); + } + }; + + /** + * The user preference listener. + */ + private final PreferenceChangeListener userPreferenceListener = (evt) -> { + this.clearCaches(); + }; + + /** + * The ingest module event listener. 
+ */ + private final PropertyChangeListener ingestModuleEventListener = (evt) -> { + processAndFireDAOEvent(evt); + }; + + private final PropertyChangeSupport support = new PropertyChangeSupport(this); + + private final DAOEventBatcher eventBatcher = new DAOEventBatcher<>((evt) -> this.handleAutopsyEvent(evt), MILLIS_BATCH); + + private final DataArtifactDAO dataArtifactDAO = DataArtifactDAO.getInstance(); private final AnalysisResultDAO analysisResultDAO = AnalysisResultDAO.getInstance(); private final ViewsDAO viewsDAO = ViewsDAO.getInstance(); private final FileSystemDAO fileSystemDAO = FileSystemDAO.getInstance(); private final TagsDAO tagsDAO = TagsDAO.getInstance(); private final OsAccountsDAO accountsDAO = OsAccountsDAO.getInstance(); + // GVDTODO when events are completely integrated, this list should contain all sub-DAO's private final List allDAOs = ImmutableList.of(dataArtifactDAO); @@ -90,50 +126,13 @@ public class MainDAO extends AbstractDAO { } @Override - List handleAutopsyEvent(PropertyChangeEvent evt) { + List handleAutopsyEvent(Collection evt) { return Stream.of(allDAOs) .map(subDAO -> handleAutopsyEvent(evt)) .flatMap(evts -> evts == null ? Stream.empty() : evts.stream()) .collect(Collectors.toList()); } - /** - * The relevant ingest module events. - */ - private static final Set INGEST_MODULE_EVENTS = EnumSet.of(IngestManager.IngestModuleEvent.CONTENT_CHANGED, IngestManager.IngestModuleEvent.DATA_ADDED); - - /** - * The relevant case events. - */ - private static final Set CASE_EVENTS = EnumSet.of(Case.Events.CURRENT_CASE); - - /** - * The ingest module event listener. - */ - private final PropertyChangeListener ingestModuleEventListener = (evt) -> { - processAndFireDAOEvent(evt); - }; - - /** - * The case event listener. 
- */ - private final PropertyChangeListener caseEventListener = (evt) -> { - if (evt.getPropertyName().equals(Case.Events.CURRENT_CASE.toString())) { - this.clearCaches(); - } else { - processAndFireDAOEvent(evt); - } - }; - - /** - * The user preference listener. - */ - private final PreferenceChangeListener userPreferenceListener = (evt) -> { - this.clearCaches(); - }; - - private final PropertyChangeSupport support = new PropertyChangeSupport(this); - public void addPropertyChangeListener(PropertyChangeListener listener) { support.addPropertyChangeListener(listener); } @@ -160,10 +159,7 @@ public class MainDAO extends AbstractDAO { UserPreferences.removeChangeListener(userPreferenceListener); } - void processAndFireDAOEvent(PropertyChangeEvent autopsyEvent) { - List events = this.handleAutopsyEvent(autopsyEvent); - if (CollectionUtils.isNotEmpty(events)) { - support.firePropertyChange(new PropertyChangeEvent(this, "DATA_CHANGE", null, new DAOAggregateEvent(new HashSet<>(events)))); - } + private void processAndFireDAOEvent(PropertyChangeEvent autopsyEvent) { + this.eventBatcher.queueEvent(autopsyEvent); } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java index a5d34088fe..a4f23ad9a0 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java @@ -186,13 +186,16 @@ public class OsAccountsDAO { } @Override - public boolean isRefreshRequired(PropertyChangeEvent evt) { - String eventType = evt.getPropertyName(); - if (eventType.equals(Case.Events.OS_ACCOUNTS_ADDED.toString()) - || eventType.equals(Case.Events.OS_ACCOUNTS_DELETED.toString())) { - return true; - } - return false; + public boolean isRefreshRequired(DAOEvent evt) { + return true; + + //GVDTODO +// String eventType = evt.getPropertyName(); +// if (eventType.equals(Case.Events.OS_ACCOUNTS_ADDED.toString()) +// || 
eventType.equals(Case.Events.OS_ACCOUNTS_DELETED.toString())) { +// return true; +// } +// return false; } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java index bf5e844e72..cfacaaca1b 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java @@ -297,72 +297,76 @@ public class TagsDAO { } @Override - public boolean isRefreshRequired(PropertyChangeEvent evt) { - TagsSearchParams params = this.getParameters(); - String eventType = evt.getPropertyName(); + public boolean isRefreshRequired(DAOEvent evt) { + return true; - // handle artifact/result tag changes - if (eventType.equals(Case.Events.BLACKBOARD_ARTIFACT_TAG_ADDED.toString()) - || eventType.equals(Case.Events.BLACKBOARD_ARTIFACT_TAG_DELETED.toString())) { - - // ignore non-artifact/result tag changes - if (params.getTagType() != TagsSearchParams.TagType.RESULT) { - return false; - } - - if (evt instanceof AutopsyEvent) { - if (evt instanceof BlackBoardArtifactTagAddedEvent) { - // An artifact associated with the current case has been tagged. - BlackBoardArtifactTagAddedEvent event = (BlackBoardArtifactTagAddedEvent) evt; - // ensure tag added event has a valid content id - if (event.getAddedTag() == null || event.getAddedTag().getContent() == null || event.getAddedTag().getArtifact() == null) { - return false; - } - return params.getTagName().getId() == event.getAddedTag().getId(); - } else if (evt instanceof BlackBoardArtifactTagDeletedEvent) { - // A tag has been removed from an artifact associated with the current case. 
- BlackBoardArtifactTagDeletedEvent event = (BlackBoardArtifactTagDeletedEvent) evt; - // ensure tag deleted event has a valid content id - BlackBoardArtifactTagDeletedEvent.DeletedBlackboardArtifactTagInfo deletedTagInfo = event.getDeletedTagInfo(); - if (deletedTagInfo == null) { - return false; - } - return params.getTagName().getId() == deletedTagInfo.getTagID(); - } - } - } - - // handle file/content tag changes - if (eventType.equals(Case.Events.CONTENT_TAG_ADDED.toString()) - || eventType.equals(Case.Events.CONTENT_TAG_DELETED.toString())) { - - // ignore non-file/content tag changes - if (params.getTagType() != TagsSearchParams.TagType.FILE) { - return false; - } + // GVDTODO - if (evt instanceof AutopsyEvent) { - if (evt instanceof ContentTagAddedEvent) { - // Content associated with the current case has been tagged. - ContentTagAddedEvent event = (ContentTagAddedEvent) evt; - // ensure tag added event has a valid content id - if (event.getAddedTag() == null || event.getAddedTag().getContent() == null) { - return false; - } - return params.getTagName().getId() == event.getAddedTag().getId(); - } else if (evt instanceof ContentTagDeletedEvent) { - // A tag has been removed from content associated with the current case. 
- ContentTagDeletedEvent event = (ContentTagDeletedEvent) evt; - // ensure tag deleted event has a valid content id - ContentTagDeletedEvent.DeletedContentTagInfo deletedTagInfo = event.getDeletedTagInfo(); - if (deletedTagInfo == null) { - return false; - } - return params.getTagName().getId() == deletedTagInfo.getTagID(); - } - } - } - return false; +// TagsSearchParams params = this.getParameters(); +// String eventType = evt.getPropertyName(); +// +// // handle artifact/result tag changes +// if (eventType.equals(Case.Events.BLACKBOARD_ARTIFACT_TAG_ADDED.toString()) +// || eventType.equals(Case.Events.BLACKBOARD_ARTIFACT_TAG_DELETED.toString())) { +// +// // ignore non-artifact/result tag changes +// if (params.getTagType() != TagsSearchParams.TagType.RESULT) { +// return false; +// } +// +// if (evt instanceof AutopsyEvent) { +// if (evt instanceof BlackBoardArtifactTagAddedEvent) { +// // An artifact associated with the current case has been tagged. +// BlackBoardArtifactTagAddedEvent event = (BlackBoardArtifactTagAddedEvent) evt; +// // ensure tag added event has a valid content id +// if (event.getAddedTag() == null || event.getAddedTag().getContent() == null || event.getAddedTag().getArtifact() == null) { +// return false; +// } +// return params.getTagName().getId() == event.getAddedTag().getId(); +// } else if (evt instanceof BlackBoardArtifactTagDeletedEvent) { +// // A tag has been removed from an artifact associated with the current case. 
+// BlackBoardArtifactTagDeletedEvent event = (BlackBoardArtifactTagDeletedEvent) evt; +// // ensure tag deleted event has a valid content id +// BlackBoardArtifactTagDeletedEvent.DeletedBlackboardArtifactTagInfo deletedTagInfo = event.getDeletedTagInfo(); +// if (deletedTagInfo == null) { +// return false; +// } +// return params.getTagName().getId() == deletedTagInfo.getTagID(); +// } +// } +// } +// +// // handle file/content tag changes +// if (eventType.equals(Case.Events.CONTENT_TAG_ADDED.toString()) +// || eventType.equals(Case.Events.CONTENT_TAG_DELETED.toString())) { +// +// // ignore non-file/content tag changes +// if (params.getTagType() != TagsSearchParams.TagType.FILE) { +// return false; +// } +// +// if (evt instanceof AutopsyEvent) { +// if (evt instanceof ContentTagAddedEvent) { +// // Content associated with the current case has been tagged. +// ContentTagAddedEvent event = (ContentTagAddedEvent) evt; +// // ensure tag added event has a valid content id +// if (event.getAddedTag() == null || event.getAddedTag().getContent() == null) { +// return false; +// } +// return params.getTagName().getId() == event.getAddedTag().getId(); +// } else if (evt instanceof ContentTagDeletedEvent) { +// // A tag has been removed from content associated with the current case. 
+// ContentTagDeletedEvent event = (ContentTagDeletedEvent) evt; +// // ensure tag deleted event has a valid content id +// ContentTagDeletedEvent.DeletedContentTagInfo deletedTagInfo = event.getDeletedTagInfo(); +// if (deletedTagInfo == null) { +// return false; +// } +// return params.getTagName().getId() == deletedTagInfo.getTagID(); +// } +// } +// } +// return false; } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java index 33914fb6e9..454cf6ea27 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java @@ -662,13 +662,15 @@ public class ViewsDAO { } @Override - public boolean isRefreshRequired(PropertyChangeEvent evt) { - Content content = DAOEventUtils.getContentFromEvt(evt); - if (content == null) { - return false; - } - - return MainDAO.getInstance().getViewsDAO().isFilesByExtInvalidating(this.getParameters(), content); + public boolean isRefreshRequired(DAOEvent evt) { + return true; + // GVDTODO +// Content content = DAOEventUtils.getContentFromEvt(evt); +// if (content == null) { +// return false; +// } +// +// return MainDAO.getInstance().getViewsDAO().isFilesByExtInvalidating(this.getParameters(), content); } } @@ -692,13 +694,15 @@ public class ViewsDAO { } @Override - public boolean isRefreshRequired(PropertyChangeEvent evt) { - Content content = DAOEventUtils.getContentFromEvt(evt); - if (content == null) { - return false; - } - - return MainDAO.getInstance().getViewsDAO().isFilesByMimeInvalidating(this.getParameters(), content); + public boolean isRefreshRequired(DAOEvent evt) { + return true; + // GVDTODO +// Content content = DAOEventUtils.getContentFromEvt(evt); +// if (content == null) { +// return false; +// } +// +// return MainDAO.getInstance().getViewsDAO().isFilesByMimeInvalidating(this.getParameters(), content); } } @@ -722,13 +726,16 @@ public class ViewsDAO { 
} @Override - public boolean isRefreshRequired(PropertyChangeEvent evt) { - Content content = DAOEventUtils.getContentFromEvt(evt); - if (content == null) { - return false; - } - - return MainDAO.getInstance().getViewsDAO().isFilesBySizeInvalidating(this.getParameters(), content); + public boolean isRefreshRequired(DAOEvent evt) { + return true; + + // GVDTODO +// Content content = DAOEventUtils.getContentFromEvt(evt); +// if (content == null) { +// return false; +// } +// +// return MainDAO.getInstance().getViewsDAO().isFilesBySizeInvalidating(this.getParameters(), content); } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DAOFetcher.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DAOFetcher.java index a4f60d3cfe..43552ee959 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DAOFetcher.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DAOFetcher.java @@ -20,6 +20,7 @@ package org.sleuthkit.autopsy.mainui.nodes; import java.beans.PropertyChangeEvent; import java.util.concurrent.ExecutionException; +import org.sleuthkit.autopsy.mainui.datamodel.DAOEvent; import org.sleuthkit.autopsy.mainui.datamodel.SearchResultsDTO; /** @@ -70,5 +71,5 @@ public abstract class DAOFetcher

{ * * @return True if the */ - public abstract boolean isRefreshRequired(PropertyChangeEvent evt); + public abstract boolean isRefreshRequired(DAOEvent evt); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/SearchManager.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/SearchManager.java index d84c6cc313..bf55366bbb 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/SearchManager.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/SearchManager.java @@ -18,9 +18,9 @@ */ package org.sleuthkit.autopsy.mainui.nodes; -import java.beans.PropertyChangeEvent; import java.text.MessageFormat; import java.util.concurrent.ExecutionException; +import org.sleuthkit.autopsy.mainui.datamodel.DAOEvent; import org.sleuthkit.autopsy.mainui.datamodel.SearchResultsDTO; /** @@ -171,11 +171,11 @@ public class SearchManager { /** * Determines if a refresh is required for the currently selected item. * - * @param evt The ingest module event. + * @param evt The event. * * @return True if an update is required. */ - public synchronized boolean isRefreshRequired(PropertyChangeEvent evt) { + public synchronized boolean isRefreshRequired(DAOEvent evt) { return isRefreshRequired(this.daoFetcher, evt); } @@ -183,11 +183,11 @@ public class SearchManager { * Determines if a refresh is required for the currently selected item. * * @param dataFetcher The data fetcher. - * @param evt The ingest module event. + * @param evt The event. * * @return True if an update is required. */ - private synchronized

boolean isRefreshRequired(DAOFetcher

dataFetcher, PropertyChangeEvent evt) { + private synchronized

boolean isRefreshRequired(DAOFetcher

dataFetcher, DAOEvent evt) { if (dataFetcher == null) { return false; } From fce28bfc41526ae82a57670c888866a42317a585 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Wed, 10 Nov 2021 14:23:17 -0500 Subject: [PATCH 019/142] bug fixes --- .../corecomponents/DataResultPanel.java | 32 +++++++------- .../mainui/datamodel/DataArtifactDAO.java | 2 +- .../autopsy/mainui/datamodel/MainDAO.java | 43 ++++++++++++++----- 3 files changed, 50 insertions(+), 27 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java index 3a301bb644..dc7e57a0b8 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java @@ -146,15 +146,15 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C ps.postPageSizeChangeEvent(); }); - try { - if (this.searchResultManager != null) { - DAOFetcher previousFetcher = this.searchResultManager.getDaoFetcher(); - this.searchResultManager = new SearchManager(previousFetcher, newPageSize); - displaySearchResults(this.searchResultManager.getResults(), false); - } - } catch (IllegalArgumentException | ExecutionException ex) { - logger.log(Level.WARNING, "There was an error while updating page size", ex); - } +// try { +// if (this.searchResultManager != null) { +// DAOFetcher previousFetcher = this.searchResultManager.getDaoFetcher(); +// this.searchResultManager = new SearchManager(previousFetcher, newPageSize); +// displaySearchResults(this.searchResultManager.getResults(), false); +// } +// } catch (IllegalArgumentException | ExecutionException ex) { +// logger.log(Level.WARNING, "There was an error while updating page size", ex); +// } } }; @@ -163,7 +163,8 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C private final PropertyChangeListener caseEventListener = evt -> { String evtName = 
evt.getPropertyName(); if (Case.Events.DATA_SOURCE_ADDED.toString().equals(evtName)) { - refreshSearchResultChildren(); + // GVDTODO could potentially be removed +// refreshSearchResultChildren(); } else if (Case.Events.CURRENT_CASE.toString().equals(evtName) && evt.getNewValue() == null) { nodeNameToPageCountListenerMap.clear(); } @@ -195,11 +196,12 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C IngestManager.IngestJobEvent.CANCELLED); private final PropertyChangeListener ingestJobListener = (PropertyChangeEvent evt) -> { - String eventType = evt.getPropertyName(); - if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString()) - || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) { - refreshSearchResultChildren(); - } + // GVDTODO could potentially be removed +// String eventType = evt.getPropertyName(); +// if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString()) +// || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) { +// refreshSearchResultChildren(); +// } }; private final PropertyChangeListener weakIngestJobListener = WeakListeners.propertyChange(ingestJobListener, null); diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index 00cb613384..6a263aa1e7 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -181,7 +181,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { .forEach((art) -> { artifactTypeDataSourceMap .computeIfAbsent(art.getArtifactTypeID(), (k) -> new HashSet<>()) - .add(art.getDataSourceObjId()); + .add(art.getDataSourceObjectID()); }); // invalidate cache entries that are affected by events diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java 
index f4d5f9f31f..72a98fb76c 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java @@ -24,7 +24,6 @@ import java.beans.PropertyChangeListener; import java.beans.PropertyChangeSupport; import java.util.Collection; import java.util.EnumSet; -import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.prefs.PreferenceChangeListener; @@ -44,12 +43,13 @@ public class MainDAO extends AbstractDAO { private static final Set INGEST_MODULE_EVENTS = EnumSet.of(IngestManager.IngestModuleEvent.CONTENT_CHANGED, IngestManager.IngestModuleEvent.DATA_ADDED); private static final Set CASE_EVENTS = EnumSet.of(Case.Events.CURRENT_CASE); private static final long MILLIS_BATCH = 5000; - + private static MainDAO instance = null; public synchronized static MainDAO getInstance() { if (instance == null) { instance = new MainDAO(); + instance.register(); } return instance; @@ -62,7 +62,7 @@ public class MainDAO extends AbstractDAO { if (evt.getPropertyName().equals(Case.Events.CURRENT_CASE.toString())) { this.clearCaches(); } else { - processAndFireDAOEvent(evt); + handleAutopsyEvent(evt); } }; @@ -77,21 +77,19 @@ public class MainDAO extends AbstractDAO { * The ingest module event listener. 
*/ private final PropertyChangeListener ingestModuleEventListener = (evt) -> { - processAndFireDAOEvent(evt); + handleAutopsyEvent(evt); }; private final PropertyChangeSupport support = new PropertyChangeSupport(this); - private final DAOEventBatcher eventBatcher = new DAOEventBatcher<>((evt) -> this.handleAutopsyEvent(evt), MILLIS_BATCH); - - + private final DAOEventBatcher eventBatcher = new DAOEventBatcher<>((evts) -> this.getDAOEventsAndFire(evts), MILLIS_BATCH); + private final DataArtifactDAO dataArtifactDAO = DataArtifactDAO.getInstance(); private final AnalysisResultDAO analysisResultDAO = AnalysisResultDAO.getInstance(); private final ViewsDAO viewsDAO = ViewsDAO.getInstance(); private final FileSystemDAO fileSystemDAO = FileSystemDAO.getInstance(); private final TagsDAO tagsDAO = TagsDAO.getInstance(); private final OsAccountsDAO accountsDAO = OsAccountsDAO.getInstance(); - // GVDTODO when events are completely integrated, this list should contain all sub-DAO's private final List allDAOs = ImmutableList.of(dataArtifactDAO); @@ -127,12 +125,25 @@ public class MainDAO extends AbstractDAO { @Override List handleAutopsyEvent(Collection evt) { - return Stream.of(allDAOs) - .map(subDAO -> handleAutopsyEvent(evt)) + return allDAOs.stream() + .map(subDAO -> subDAO.handleAutopsyEvent(evt)) .flatMap(evts -> evts == null ? Stream.empty() : evts.stream()) .collect(Collectors.toList()); } + /** + * Determines DAO events from autopsy events and fires DAO aggregate event + * if there are any created DAO events. + * + * @param evts The autopsy events. 
+ */ + private void getDAOEventsAndFire(Collection evts) { + List daoEvents = handleAutopsyEvent(evts); + if (!CollectionUtils.isEmpty(daoEvents)) { + support.firePropertyChange(new PropertyChangeEvent(this, "DATA_CHANGE", null, new DAOAggregateEvent(daoEvents))); + } + } + public void addPropertyChangeListener(PropertyChangeListener listener) { support.addPropertyChangeListener(listener); } @@ -150,6 +161,11 @@ public class MainDAO extends AbstractDAO { UserPreferences.addChangeListener(userPreferenceListener); } + @Override + protected void finalize() throws Throwable { + unregister(); + } + /** * Unregisters listeners from autopsy event publishers. */ @@ -159,7 +175,12 @@ public class MainDAO extends AbstractDAO { UserPreferences.removeChangeListener(userPreferenceListener); } - private void processAndFireDAOEvent(PropertyChangeEvent autopsyEvent) { + /** + * Handle incoming autopsy event by queueing in batch and firing events. + * + * @param autopsyEvent The autopsy event. + */ + private void handleAutopsyEvent(PropertyChangeEvent autopsyEvent) { this.eventBatcher.queueEvent(autopsyEvent); } } From fcfe3bee295b2dabd3d68aa1cc7f80e94e7396b5 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Wed, 10 Nov 2021 15:19:24 -0500 Subject: [PATCH 020/142] inherit from AbstractDAO --- .../mainui/datamodel/AnalysisResultEvent.java | 48 +++++++++++++++++++ .../mainui/datamodel/FileSystemDAO.java | 2 +- .../autopsy/mainui/datamodel/MainDAO.java | 10 +++- .../mainui/datamodel/OsAccountsDAO.java | 2 +- .../autopsy/mainui/datamodel/TagsDAO.java | 2 +- .../autopsy/mainui/datamodel/ViewsDAO.java | 3 +- 6 files changed, 60 insertions(+), 7 deletions(-) create mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultEvent.java diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultEvent.java new file mode 100644 index 0000000000..981b846a0c --- /dev/null +++ 
b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultEvent.java @@ -0,0 +1,48 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel; + +/** + * An event for an artifact added or changed of a particular type possibly for a + * particular data source. + */ +public class AnalysisResultEvent extends BlackboardArtifactEvent { + private final String setName; + private final String regex; + private final String matchString; + + AnalysisResultEvent(long artifactTypeId, long dataSourceId, String setName, String regex, String matchString) { + super(artifactTypeId, dataSourceId); + this.setName = setName; + this.regex = regex; + this.matchString = matchString; + } + + public String getSetName() { + return setName; + } + + public String getRegex() { + return regex; + } + + public String getMatchString() { + return matchString; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java index 75e99ab7aa..c31f468565 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java @@ -39,7 +39,7 @@ import org.sleuthkit.datamodel.TskCoreException; /** * */ -public class FileSystemDAO { +public class FileSystemDAO extends 
AbstractDAO { private static final int CACHE_SIZE = 15; // rule of thumb: 5 entries times number of cached SearchParams sub-types private static final long CACHE_DURATION = 2; private static final TimeUnit CACHE_DURATION_UNITS = TimeUnit.MINUTES; diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java index 72a98fb76c..3da6840850 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java @@ -91,8 +91,14 @@ public class MainDAO extends AbstractDAO { private final TagsDAO tagsDAO = TagsDAO.getInstance(); private final OsAccountsDAO accountsDAO = OsAccountsDAO.getInstance(); - // GVDTODO when events are completely integrated, this list should contain all sub-DAO's - private final List allDAOs = ImmutableList.of(dataArtifactDAO); + // NOTE: whenever adding a new sub-dao, it should be added to this list for event updates. + private final List allDAOs = ImmutableList.of( + dataArtifactDAO, + analysisResultDAO, + viewsDAO, + fileSystemDAO, + tagsDAO, + accountsDAO); public DataArtifactDAO getDataArtifactsDAO() { return dataArtifactDAO; diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java index a4f23ad9a0..2bab487994 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java @@ -54,7 +54,7 @@ import org.sleuthkit.datamodel.TskCoreException; "OsAccountsDAO.createSheet.comment.displayName=C", "OsAccountsDAO.createSheet.count.displayName=O", "OsAccountsDAO.fileColumns.noDescription=No Description",}) -public class OsAccountsDAO { +public class OsAccountsDAO extends AbstractDAO { private static final int CACHE_SIZE = 5; // rule of thumb: 5 entries times number of cached SearchParams sub-types private static final long 
CACHE_DURATION = 2; diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java index cfacaaca1b..64c7f85523 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java @@ -72,7 +72,7 @@ import org.sleuthkit.datamodel.TskCoreException; "TagsDAO.tagColumns.typeColLbl=Result Type", "TagsDAO.tagColumns.commentColLbl=Comment", "TagsDAO.tagColumns.userNameColLbl=User Name"}) -public class TagsDAO { +public class TagsDAO extends AbstractDAO { private static final int CACHE_SIZE = 5; // rule of thumb: 5 entries times number of cached SearchParams sub-types private static final long CACHE_DURATION = 2; diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java index 454cf6ea27..1d8ea05ce5 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java @@ -20,7 +20,6 @@ package org.sleuthkit.autopsy.mainui.datamodel; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; -import java.beans.PropertyChangeEvent; import java.sql.SQLException; import java.util.ArrayList; import java.util.Collection; @@ -55,7 +54,7 @@ import org.sleuthkit.datamodel.TskData; * Provides information to populate the results viewer for data in the views * section. 
*/ -public class ViewsDAO { +public class ViewsDAO extends AbstractDAO { private static final Logger logger = Logger.getLogger(ViewsDAO.class.getName()); From a8696217f81cefee9d98fbf6360b10227e3bb91d Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Wed, 10 Nov 2021 15:20:17 -0500 Subject: [PATCH 021/142] private constructor for events --- .../autopsy/mainui/datamodel/BlackboardArtifactEvent.java | 2 +- .../sleuthkit/autopsy/mainui/datamodel/DataArtifactEvent.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BlackboardArtifactEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BlackboardArtifactEvent.java index 7603b8f761..05f331fbc2 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BlackboardArtifactEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BlackboardArtifactEvent.java @@ -25,7 +25,7 @@ public class BlackboardArtifactEvent implements DAOEvent { private final long artifactTypeId; private final long dataSourceId; - public BlackboardArtifactEvent(long artifactTypeId, long dataSourceId) { + BlackboardArtifactEvent(long artifactTypeId, long dataSourceId) { this.artifactTypeId = artifactTypeId; this.dataSourceId = dataSourceId; } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactEvent.java index d52a8de18f..3403d9e3e6 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactEvent.java @@ -24,7 +24,7 @@ package org.sleuthkit.autopsy.mainui.datamodel; */ public class DataArtifactEvent extends BlackboardArtifactEvent { - public DataArtifactEvent(long artifactTypeId, long dataSourceId) { + DataArtifactEvent(long artifactTypeId, long dataSourceId) { super(artifactTypeId, dataSourceId); } } From cc7e94247b0477bdc396038f477f86a890d97e3a Mon Sep 17 00:00:00 2001 From: 
Richard Cordovano Date: Wed, 10 Nov 2021 17:53:15 -0500 Subject: [PATCH 022/142] 7895 CR data artifact ingest module --- .../contentviewer/Bundle.properties-MERGED | 3 +++ .../ingestmodule/Bundle.properties-MERGED | 15 +++++++-------- .../CentralRepoDataArtifactIngestModule.java | 7 ------- 3 files changed, 10 insertions(+), 15 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/Bundle.properties-MERGED index cccefccf80..a97cc319da 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/Bundle.properties-MERGED @@ -24,6 +24,9 @@ OtherOccurrencesPanel.caseDetailsDialog.noCaseNameError=Error OtherOccurrencesPanel.caseDetailsDialog.noDetails=No details for this case. OtherOccurrencesPanel.caseDetailsDialog.noDetailsReference=No case details for Global reference properties. OtherOccurrencesPanel.caseDetailsDialog.notSelected=No Row Selected +# {0} - commonality percentage +# {1} - correlation type +# {2} - correlation value OtherOccurrencesPanel.correlatedArtifacts.byType={0}% of data sources have {2} (type: {1})\n OtherOccurrencesPanel.correlatedArtifacts.failed=Failed to get frequency details. OtherOccurrencesPanel.correlatedArtifacts.isEmpty=There are no files or artifacts to correlate. diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/Bundle.properties-MERGED index de6d91c29f..cf2f2f4a12 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/Bundle.properties-MERGED @@ -1,21 +1,20 @@ -CentralRepoIngestModel_name_header=Name:
-CentralRepoIngestModel_previous_case_header=
Previous Cases:
-CentralRepoIngestModule.prevCaseComment.text=Previous Case: -CentralRepoIngestModule.prevTaggedSet.text=Previously Tagged As Notable (Central Repository) +CentralRepoIngestModule_artifact_type_inbox_msg_header=Artifact Type CentralRepoIngestModule_cannotGetCrCaseErrMsg=Case not present in the central repository CentralRepoIngestModule_cannotGetCrDataSourceErrMsg=Data source not present in the central repository CentralRepoIngestModule_crDatabaseTypeMismatch=Mulit-user cases require a PostgreSQL central repository CentralRepoIngestModule_crInaccessibleErrMsg=Error accessing central repository CentralRepoIngestModule_crNotEnabledErrMsg=Central repository required, but not enabled +CentralRepoIngestModule_filename_inbox_msg_header=File Name +CentralRepoIngestModule_md5Hash_inbox_msg_header=MD5 Hash CentralRepoIngestModule_missingFileCorrAttrTypeErrMsg=Correlation attribute type for files not found in the central repository CentralRepoIngestModule_noCurrentCaseErrMsg=Error getting current case -CentralRepoIngestModule_notable_message_header=A file in this data source was previously seen and tagged as Notable.
+CentralRepoIngestModule_notable_attr_inbox_msg_header=Notable Attribute +# {0} - Name of item that is Notable +CentralRepoIngestModule_notable_inbox_msg_subject=Notable: {0} # {0} - list of cases CentralRepoIngestModule_notableJustification=Previously marked as notable in cases {0} CentralRepoIngestModule_notableSetName=Previously Tagged As Notable (Central Repository) -CentralRepoIngestModule_osAcctMgrInaccessibleErrMsg=Error getting OS accounts manager -# {0} - Name of file that is Notable -CentralRepoIngestModule_postToBB_knownBadMsg=Notable: {0} +CentralRepoIngestModule_prev_cases_inbox_msg_header=Previous Cases # {0} - list of cases CentralRepoIngestModule_prevSeenJustification=Previously seen in cases {0} CentralRepoIngestModule_prevSeenOsAcctConfig=Previously Seen Users (Central Repository) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java index ff6393b638..74a42a3f43 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java @@ -20,7 +20,6 @@ package org.sleuthkit.autopsy.centralrepository.ingestmodule; import java.util.ArrayList; import java.util.HashSet; -import java.util.LinkedHashSet; import java.util.List; import java.util.Set; import java.util.logging.Level; @@ -66,7 +65,6 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo private final boolean flagPrevSeenDevices; private final boolean flagUniqueArtifacts; private final boolean saveCorrAttrInstances; - private final Set corrAttrsAnalyzed; private CentralRepository centralRepo; private IngestJobContext context; @@ -84,7 +82,6 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo flagPrevSeenDevices = 
settings.isFlagPreviousDevices(); flagUniqueArtifacts = settings.isFlagUniqueArtifacts(); saveCorrAttrInstances = settings.shouldCreateCorrelationProperties(); - corrAttrsAnalyzed = new LinkedHashSet<>(); } @NbBundle.Messages({ @@ -201,10 +198,6 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo } for (CorrelationAttributeInstance corrAttr : corrAttrs) { - if (!corrAttrsAnalyzed.add(corrAttr.toString())) { - continue; - } - if (artifact != null) { makeAnalysisResults(artifact, corrAttr); } else { From a698863a0fd084cb59345fafba12f5fa867fc8ef Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Wed, 10 Nov 2021 17:57:38 -0500 Subject: [PATCH 023/142] 7895 CR data artifact ingest module --- .../CentralRepoIngestModuleUtils.java | 336 ++++++++++++++++++ 1 file changed, 336 insertions(+) create mode 100755 Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModuleUtils.java diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModuleUtils.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModuleUtils.java new file mode 100755 index 0000000000..e1f2582478 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModuleUtils.java @@ -0,0 +1,336 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021-2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.centralrepository.ingestmodule; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.Optional; +import java.util.Set; +import java.util.logging.Level; +import java.util.stream.Collectors; +import org.openide.util.NbBundle; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; +import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException; +import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository; +import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance; +import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.ingest.IngestMessage; +import org.sleuthkit.autopsy.ingest.IngestServices; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.AnalysisResult; +import org.sleuthkit.datamodel.Blackboard; +import org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.BlackboardAttribute; +import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_TYPE; +import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_VALUE; +import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_OTHER_CASES; +import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME; +import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.DataArtifact; +import org.sleuthkit.datamodel.Score; +import org.sleuthkit.datamodel.TskCoreException; + +/** + * Utility methods shared by the central repository ingest modules. 
+ */ +class CentralRepoIngestModuleUtils { + + private static final Logger LOGGER = Logger.getLogger(CentralRepoDataArtifactIngestModule.class.getName()); + private static final int MAX_PREV_CASES_FOR_NOTABLE_SCORE = 10; + private static final int MAX_PREV_CASES_FOR_PREV_SEEN = 20; + private final static String MODULE_NAME = CentralRepoIngestModuleFactory.getModuleName(); + + /** + * Gets any previous occurrences of a given correlation attribute in cases + * other than the current case. + * + * @param corrAttr The correlation attribute. + * + * @return The other occurrences of the correlation attribute. + */ + static List getOccurrencesInOtherCases(CorrelationAttributeInstance corrAttr, long ingestJobId) { + List previousOccurrences = new ArrayList<>(); + try { + CentralRepository centralRepo = CentralRepository.getInstance(); + previousOccurrences = centralRepo.getArtifactInstancesByTypeValue(corrAttr.getCorrelationType(), corrAttr.getCorrelationValue()); + for (Iterator iterator = previousOccurrences.iterator(); iterator.hasNext();) { + CorrelationAttributeInstance prevOccurrence = iterator.next(); + if (prevOccurrence.getCorrelationCase().getCaseUUID().equals(corrAttr.getCorrelationCase().getCaseUUID())) { + iterator.remove(); + } + } + } catch (CorrelationAttributeNormalizationException ex) { + LOGGER.log(Level.SEVERE, String.format("Error normalizing correlation attribute value for 's' (job ID=%d)", corrAttr, ingestJobId), ex); // NON-NLS + } catch (CentralRepoException ex) { + LOGGER.log(Level.SEVERE, String.format("Error getting previous occurences of correlation attribute 's' (job ID=%d)", corrAttr, ingestJobId), ex); // NON-NLS + } + return previousOccurrences; + } + + /** + * Makes a previously notable analysis result for a content. + * + * @param content The content. + * @param previousCases The names of the cases in which the artifact was + * deemed notable. + * @param corrAttrType The type of the matched correlation attribute. 
+ * @param corrAttrValue The value of the matched correlation attribute. + * @param dataSourceObjId The data source object ID. + * @param ingestJobId The ingest job ID. + */ + @NbBundle.Messages({ + "CentralRepoIngestModule_notableSetName=Previously Tagged As Notable (Central Repository)", + "# {0} - list of cases", + "CentralRepoIngestModule_notableJustification=Previously marked as notable in cases {0}" + }) + static void makePrevNotableAnalysisResult(Content content, Set previousCases, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue, long dataSourceObjId, long ingestJobId) { + String prevCases = previousCases.stream().collect(Collectors.joining(",")); + String justification = Bundle.CentralRepoIngestModule_notableJustification(prevCases); + Collection attributes = Arrays.asList( + new BlackboardAttribute(TSK_SET_NAME, MODULE_NAME, Bundle.CentralRepoIngestModule_notableSetName()), + new BlackboardAttribute(TSK_CORRELATION_TYPE, MODULE_NAME, corrAttrType.getDisplayName()), + new BlackboardAttribute(TSK_CORRELATION_VALUE, MODULE_NAME, corrAttrValue), + new BlackboardAttribute(TSK_OTHER_CASES, MODULE_NAME, prevCases)); + Optional result = makeAndPostAnalysisResult(content, BlackboardArtifact.Type.TSK_PREVIOUSLY_NOTABLE, attributes, "", Score.SCORE_NOTABLE, justification, dataSourceObjId, ingestJobId); + if (result.isPresent()) { + postNotableMessage(content, previousCases, corrAttrValue, result.get()); + } + } + + /** + * Makes a previously seen analysis result for a content, unless the content + * is too common. + * + * @param content The content. + * @param previousCases The names of the cases in which the artifact was + * previously seen. + * @param corrAttrType The type of the matched correlation attribute. + * @param corrAttrValue The value of the matched correlation attribute. + * @param dataSourceObjId The data source object ID. + * @param ingestJobId The ingest job ID. 
+ */ + @NbBundle.Messages({ + "CentralRepoIngestModule_prevSeenSetName=Previously Seen (Central Repository)", + "# {0} - list of cases", + "CentralRepoIngestModule_prevSeenJustification=Previously seen in cases {0}" + }) + static void makePrevSeenAnalysisResult(Content content, Set previousCases, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue, long dataSourceObjId, long ingestJobId) { + Optional score = calculateScore(previousCases.size()); + if (score.isPresent()) { + String prevCases = previousCases.stream().collect(Collectors.joining(",")); + String justification = Bundle.CentralRepoIngestModule_prevSeenJustification(prevCases); + Collection analysisResultAttributes = Arrays.asList( + new BlackboardAttribute(TSK_SET_NAME, MODULE_NAME, Bundle.CentralRepoIngestModule_prevSeenSetName()), + new BlackboardAttribute(TSK_CORRELATION_TYPE, MODULE_NAME, corrAttrType.getDisplayName()), + new BlackboardAttribute(TSK_CORRELATION_VALUE, MODULE_NAME, corrAttrValue), + new BlackboardAttribute(TSK_OTHER_CASES, MODULE_NAME, prevCases)); + makeAndPostAnalysisResult(content, BlackboardArtifact.Type.TSK_PREVIOUSLY_SEEN, analysisResultAttributes, "", score.get(), justification, dataSourceObjId, ingestJobId); + } + } + + /** + * Makes a previously unseen analysis result for a content. + * + * @param content The content. + * @param corrAttrType The type of the new correlation attribute. + * @param corrAttrValue The value of the new correlation attribute. + * @param dataSourceObjId The data source object ID. + * @param ingestJobId The ingest job ID. 
+ */ + @NbBundle.Messages({ + "CentralRepoIngestModule_prevUnseenJustification=Previously seen in zero cases" + }) + static void makePrevUnseenAnalysisResult(Content content, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue, long dataSourceObjId, long ingestJobId) { + Collection attributesForNewArtifact = Arrays.asList( + new BlackboardAttribute(TSK_CORRELATION_TYPE, MODULE_NAME, corrAttrType.getDisplayName()), + new BlackboardAttribute(TSK_CORRELATION_VALUE, MODULE_NAME, corrAttrValue)); + makeAndPostAnalysisResult(content, BlackboardArtifact.Type.TSK_PREVIOUSLY_UNSEEN, attributesForNewArtifact, "", Score.SCORE_LIKELY_NOTABLE, Bundle.CentralRepoIngestModule_prevUnseenJustification(), dataSourceObjId, ingestJobId); + } + + /** + * Calculates a score based in a number of previous cases. + * + * @param numPreviousCases The number of previous cases. + * + * @return An Optional of a score, will be empty if there is no score + * because the number of previous cases is too high, indicating a + * common and therefore uninteresting item. + */ + static Optional calculateScore(int numPreviousCases) { + Score score = null; + if (numPreviousCases <= MAX_PREV_CASES_FOR_NOTABLE_SCORE) { + score = Score.SCORE_LIKELY_NOTABLE; + } else if (numPreviousCases > MAX_PREV_CASES_FOR_NOTABLE_SCORE && numPreviousCases <= MAX_PREV_CASES_FOR_PREV_SEEN) { + score = Score.SCORE_NONE; + } + return Optional.ofNullable(score); + } + + /** + * Makes a new analysis result of a given type for a content and posts it to + * the blackboard. + * + * @param content The content. + * @param analysisResultType The type of analysis result to make. + * @param analysisResultAttrs The attributes of the new analysis result. + * @param configuration The configuration for the new analysis result. + * @param score The score for the new analysis result. + * @param justification The justification for the new analysis result. + * @param dataSourceObjId The data source object ID. 
+ * @param ingestJobId The ingest job ID. + * + * @return The analysis result or null if the result already existed or an + * error that prevented creation of the analysis result occurred. + */ + private static Optional makeAndPostAnalysisResult(Content content, BlackboardArtifact.Type analysisResultType, Collection analysisResultAttrs, String configuration, Score score, String justification, long dataSourceObjId, long ingestJobId) { + AnalysisResult analysisResult = null; + try { + Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard(); + if (!blackboard.artifactExists(content, analysisResultType, analysisResultAttrs)) { + analysisResult = content.newAnalysisResult(analysisResultType, score, null, configuration, justification, analysisResultAttrs, dataSourceObjId).getAnalysisResult(); + try { + blackboard.postArtifact(analysisResult, MODULE_NAME, ingestJobId); + } catch (Blackboard.BlackboardException ex) { + LOGGER.log(Level.SEVERE, String.format("Error posting analysis result '%s' to blackboard for content 's' (job ID=%d)", analysisResult, content, ingestJobId), ex); //NON-NLS + } + } + } catch (NoCurrentCaseException | TskCoreException ex) { + LOGGER.log(Level.SEVERE, String.format("Error creating %s analysis result for content '%s' (job ID=%d)", analysisResultType, content, ingestJobId), ex); // NON-NLS + } + return Optional.ofNullable(analysisResult); + } + + /** + * Posts a message to the ingest messages inbox to notify the user that a + * notable content has been found, i.e., a previously notable analysis + * result has been created. + * + * @param content The notable content. + * @param otherCases The other cases in which the content was marked as + * notable. + * @param corrAttrValue The correlation attribute value used to identify + * the content, used by the ingest inbox as a unique + * key for message grouping. + * @param analysisResult The previously notable analysis result. 
+ */ + @NbBundle.Messages({ + "# {0} - Name of item that is Notable", + "CentralRepoIngestModule_notable_inbox_msg_subject=Notable: {0}" + }) + private static void postNotableMessage(Content content, Set otherCases, String corrAttrValue, AnalysisResult analysisResult) { + String msgSubject = null; + String msgDetails = null; + String msgKey = corrAttrValue; + if (content instanceof AbstractFile) { + AbstractFile file = (AbstractFile) content; + msgSubject = Bundle.CentralRepoIngestModule_notable_inbox_msg_subject(file.getName()); + msgDetails = makeNotableFileMessage(file, otherCases); + } else if (content instanceof DataArtifact) { + DataArtifact artifact = (DataArtifact) content; + msgSubject = Bundle.CentralRepoIngestModule_notable_inbox_msg_subject(artifact.getDisplayName()); + msgDetails = makeNotableDataArtifactMessage(artifact, corrAttrValue, otherCases); + } else { + LOGGER.log(Level.SEVERE, "Unsupported Content, cannot post ingest inbox message"); + } + if (msgSubject != null && msgDetails != null) { + IngestServices.getInstance().postMessage( + IngestMessage.createDataMessage( + MODULE_NAME, + msgSubject, + msgDetails, + msgKey, + analysisResult)); + } + } + + /** + * Makes an ingest inbox message for a notable file. Uses similar HTML + * markup as is used for this purpose by the hash lookup ingest module. + * + * @param file The notable file. + * @param otherCases The cases other than the current case in which the file + * was marked as nmotable. + * + * @return The message. 
+ */ + @NbBundle.Messages({ + "CentralRepoIngestModule_filename_inbox_msg_header=File Name", + "CentralRepoIngestModule_md5Hash_inbox_msg_header=MD5 Hash", + "CentralRepoIngestModule_prev_cases_inbox_msg_header=Previous Cases" + }) + private static String makeNotableFileMessage(AbstractFile file, Set otherCases) { + StringBuilder message = new StringBuilder(1024); + message.append(""); //NON-NLS + addTableRowMarkup(message, Bundle.CentralRepoIngestModule_filename_inbox_msg_header(), file.getName()); + addTableRowMarkup(message, Bundle.CentralRepoIngestModule_md5Hash_inbox_msg_header(), file.getMd5Hash()); + addTableRowMarkup(message, Bundle.CentralRepoIngestModule_prev_cases_inbox_msg_header(), otherCases.stream().collect(Collectors.joining(","))); + return message.toString(); + } + + /** + * Makes an ingest inbox message for a notable data artifact. Uses similar + * HTML markup as is used for this purpose by the hash lookup ingest module. + * + * @param artifact The data artifact + * @param corrAttrValue The notable attribute (correlation attribute value). + * @param otherCases The cases other than the current case in which the + * artifact was marked as nmotable. + * + * @return The message. + */ + @NbBundle.Messages({ + "CentralRepoIngestModule_artifact_type_inbox_msg_header=Artifact Type", + "CentralRepoIngestModule_notable_attr_inbox_msg_header=Notable Attribute" + }) + private static String makeNotableDataArtifactMessage(DataArtifact artifact, String corrAttrValue, Set otherCases) { + StringBuilder message = new StringBuilder(1024); + message.append("
"); //NON-NLS + addTableRowMarkup(message, Bundle.CentralRepoIngestModule_artifact_type_inbox_msg_header(), artifact.getDisplayName()); + addTableRowMarkup(message, Bundle.CentralRepoIngestModule_notable_attr_inbox_msg_header(), corrAttrValue); + addTableRowMarkup(message, Bundle.CentralRepoIngestModule_prev_cases_inbox_msg_header(), otherCases.stream().collect(Collectors.joining(","))); + message.append("
"); //NON-NLS + return message.toString(); + } + + /** + * Adds a table row to a notable item message (HTML). + * + * @param message The string builder for the message. + * @param headerText The table row header text. + * @param cellText The table row cell text. + */ + private static void addTableRowMarkup(StringBuilder message, String headerText, String cellText) { + message.append(""); //NON-NLS + message.append("").append(headerText).append(""); //NON-NLS + message.append("").append(cellText).append(""); //NON-NLS + message.append(""); //NON-NLS + } + + /* + * Prevents instatiation of this utility class. + */ + private CentralRepoIngestModuleUtils() { + } + +} From 5fcd6ba9a7e8533d2f345fe454fa3fff811fe6d1 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Wed, 10 Nov 2021 19:29:48 -0500 Subject: [PATCH 024/142] working through analysis results mostly --- .../mainui/datamodel/AnalysisResultDAO.java | 109 +++++++++++++----- .../mainui/datamodel/AnalysisResultEvent.java | 19 +-- .../datamodel/AnalysisResultSetEvent.java | 36 ++++++ .../mainui/datamodel/DataArtifactDAO.java | 3 +- .../mainui/datamodel/KeywordHitEvent.java | 42 +++++++ 5 files changed, 159 insertions(+), 50 deletions(-) create mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultSetEvent.java create mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/KeywordHitEvent.java diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java index f259f0b7e4..172134d05a 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java @@ -27,12 +27,16 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Comparator; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import 
java.util.concurrent.ExecutionException; import java.util.logging.Level; import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.apache.commons.lang3.tuple.Pair; import org.openide.util.NbBundle; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; @@ -268,10 +272,27 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { return analysisResultCache.get(searchParams, () -> fetchAnalysisResultsForTable(searchParams)); } - public boolean isAnalysisResultsInvalidating(AnalysisResultSearchParam key, ModuleDataEvent eventData) { - return key.getArtifactType().equals(eventData.getBlackboardArtifactType()); + public boolean isAnalysisResultsInvalidating(AnalysisResultSearchParam key, DAOEvent eventData) { + if (!(eventData instanceof AnalysisResultEvent)) { + return false; + } + + AnalysisResultEvent analysisResultEvt = (AnalysisResultEvent) eventData; + return key.getArtifactType().getTypeID() == analysisResultEvt.getArtifactTypeId() + && (key.getDataSourceId() == null || key.getDataSourceId() == analysisResultEvt.getDataSourceId()); } + public boolean isAnalysisResultsSetInvalidating(AnalysisResultSetSearchParam key, DAOEvent event) { + if (!(event instanceof AnalysisResultSetEvent)) { + return false; + } + + AnalysisResultSetEvent setEvent = (AnalysisResultSetEvent) event; + return isAnalysisResultsInvalidating((AnalysisResultSearchParam) key, (AnalysisResultEvent) setEvent) + && Objects.equals(key.getSetName(), setEvent.getSetName()); + } + + // GVDTODO handle keyword hits public AnalysisResultTableSearchResultsDTO getAnalysisResultSetHits(AnalysisResultSetSearchParam artifactKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { if (artifactKey.getDataSourceId() != null && artifactKey.getDataSourceId() < 0) { throw new IllegalArgumentException(MessageFormat.format("Illegal data. 
" @@ -691,41 +712,53 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } @Override - List handleAutopsyEvent(Collection evt) { - throw new UnsupportedOperationException("Not supported yet."); - } + List handleAutopsyEvent(Collection evts) { + // get a grouping of artifacts mapping the artifact type id to data source id. + Map> analysisResultMap = new HashMap<>(); + Map, Set> setMap = new HashMap<>(); + Map> keywordHitsMap = new HashMap<>(); - /** - * Handles basic functionality of fetching and paging of analysis results. - */ - static abstract class AbstractAnalysisResultFetcher extends DAOFetcher { + for (PropertyChangeEvent evt : evts) { + ModuleDataEvent dataEvt = DAOEventUtils.getModuleDataFromEvt(evt); + if (dataEvt != null) { + for (BlackboardArtifact art : dataEvt.getArtifacts()) { + if (art.getArtifactTypeID() == BlackboardArtifact.Type.TSK_KEYWORD_HIT.getTypeID()) { + // GVDTODO + } else if (art.getArtifactTypeID() == BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT.getTypeID() + || art.getArtifactTypeID() == BlackboardArtifact.Type.TSK_INTERESTING_ARTIFACT_HIT.getTypeID() + || art.getArtifactTypeID() == BlackboardArtifact.Type.TSK_HASHSET_HIT.getTypeID()) { - /** - * Main constructor. - * - * @param params Parameters to handle fetching of data. - */ - public AbstractAnalysisResultFetcher(T params) { - super(params); + BlackboardAttribute setAttr = art.getAttribute(BlackboardAttribute.Type.TSK_SET_NAME); + String setName = setAttr == null ? 
null : setAttr.getValueString(); + setMap.computeIfAbsent(Pair.of(art.getArtifactTypeID(), setName), (k) -> new HashSet<>()) + .add(art.getDataSourceObjectID()); + + } else if (BlackboardArtifact.Category.DATA_ARTIFACT.equals(art.getType().getCategory())) { + analysisResultMap.computeIfAbsent(art.getArtifactTypeID(), (k) -> new HashSet<>()) + .add(art.getDataSourceObjectID()); + } + } + } } - @Override - public boolean isRefreshRequired(DAOEvent evt) { - return true; - // GVDTODO -// ModuleDataEvent dataEvent = DAOEventUtils.getModuleDataFromEvt(evt); -// if (dataEvent == null) { -// return false; -// } -// -// return MainDAO.getInstance().getAnalysisResultDAO().isAnalysisResultsInvalidating(this.getParameters(), dataEvent); - } + // invalidate cache entries that are affected by events + // GVDTODO handle concurrency issues that may arise + Stream analysisResultEvts = analysisResultMap.entrySet().stream() + .flatMap(entry -> entry.getValue().stream().map(dsId -> new AnalysisResultEvent(entry.getKey(), dsId))); + + Stream analysisResultSetEvts = setMap.entrySet().stream() + .flatMap(entry -> entry.getValue().stream().map(dsId -> new AnalysisResultSetEvent(entry.getKey().getRight(), entry.getKey().getLeft(), dsId))); + + // GVDTODO handle keyword hits + return Stream.of(analysisResultEvts, analysisResultSetEvts) + .flatMap(s -> s) + .collect(Collectors.toList()); } /** * Handles fetching and paging of analysis results. */ - public static class AnalysisResultFetcher extends AbstractAnalysisResultFetcher { + public class AnalysisResultFetcher extends DAOFetcher { /** * Main constructor. 
@@ -740,12 +773,17 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { return MainDAO.getInstance().getAnalysisResultDAO().getAnalysisResultsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); } + + @Override + public boolean isRefreshRequired(DAOEvent evt) { + return AnalysisResultDAO.this.isAnalysisResultsInvalidating(this.getParameters(), evt); + } } /** * Handles fetching and paging of hashset hits. */ - public static class AnalysisResultSetFetcher extends AbstractAnalysisResultFetcher { + public class AnalysisResultSetFetcher extends DAOFetcher { /** * Main constructor. @@ -760,12 +798,17 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { return MainDAO.getInstance().getAnalysisResultDAO().getAnalysisResultSetHits(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); } + + @Override + public boolean isRefreshRequired(DAOEvent evt) { + return AnalysisResultDAO.this.isAnalysisResultsSetInvalidating(this.getParameters(), evt); + } } /** * Handles fetching and paging of keyword hits. */ - public static class KeywordHitResultFetcher extends AbstractAnalysisResultFetcher { + public static class KeywordHitResultFetcher extends DAOFetcher { /** * Main constructor. 
@@ -780,5 +823,11 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { return MainDAO.getInstance().getAnalysisResultDAO().getKeywordHitsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); } + + @Override + public boolean isRefreshRequired(DAOEvent evt) { + // GVDTODO + throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. + } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultEvent.java index 981b846a0c..edda77261f 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultEvent.java @@ -23,26 +23,9 @@ package org.sleuthkit.autopsy.mainui.datamodel; * particular data source. 
*/ public class AnalysisResultEvent extends BlackboardArtifactEvent { - private final String setName; - private final String regex; - private final String matchString; - AnalysisResultEvent(long artifactTypeId, long dataSourceId, String setName, String regex, String matchString) { + AnalysisResultEvent(long artifactTypeId, long dataSourceId) { super(artifactTypeId, dataSourceId); - this.setName = setName; - this.regex = regex; - this.matchString = matchString; } - public String getSetName() { - return setName; - } - - public String getRegex() { - return regex; - } - - public String getMatchString() { - return matchString; - } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultSetEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultSetEvent.java new file mode 100644 index 0000000000..453b0df8bd --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultSetEvent.java @@ -0,0 +1,36 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel; + +/** + * An event for an artifact added or changed of a particular type possibly for a + * particular data source. 
+ */ +public class AnalysisResultSetEvent extends AnalysisResultEvent { + private final String setName; + + public AnalysisResultSetEvent(String setName, long artifactTypeId, long dataSourceId) { + super(artifactTypeId, dataSourceId); + this.setName = setName; + } + + public String getSetName() { + return setName; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index 6a263aa1e7..3b383b684b 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -24,7 +24,6 @@ import java.beans.PropertyChangeEvent; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; @@ -36,7 +35,6 @@ import java.util.stream.Collectors; import org.sleuthkit.autopsy.coreutils.Logger; import java.util.concurrent.ExecutionException; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; -import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; @@ -179,6 +177,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { .filter(dataEvt -> dataEvt != null) .flatMap(dataEvt -> dataEvt.getArtifacts().stream()) .forEach((art) -> { + // GVDTODO scope to data artifacts artifactTypeDataSourceMap .computeIfAbsent(art.getArtifactTypeID(), (k) -> new HashSet<>()) .add(art.getDataSourceObjectID()); diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/KeywordHitEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/KeywordHitEvent.java new file mode 100644 index 0000000000..a61feefbda --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/KeywordHitEvent.java @@ -0,0 +1,42 @@ 
+/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel; + +/** + * An event for an artifact added or changed of a particular type possibly for a + * particular data source. + */ +public class KeywordHitEvent extends AnalysisResultSetEvent { + private final String regex; + private final String match; + + KeywordHitEvent(String regex, String match, String setName, long artifactTypeId, long dataSourceId) { + super(setName, artifactTypeId, dataSourceId); + this.regex = regex; + this.match = match; + } + + public String getRegex() { + return regex; + } + + public String getMatch() { + return match; + } +} From 02d83c346dbd88c54455628597a5bf1faa68f774 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Thu, 11 Nov 2021 08:12:28 -0500 Subject: [PATCH 025/142] cleanup TODO --- .../corecomponents/DataResultPanel.java | 34 +------- .../mainui/datamodel/DataArtifactDAO.java | 80 ++++++++++--------- 2 files changed, 43 insertions(+), 71 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java index dc7e57a0b8..8362870c6b 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java @@ -140,21 +140,9 @@ public class 
DataResultPanel extends javax.swing.JPanel implements DataResult, C private final PreferenceChangeListener pageSizeListener = (PreferenceChangeEvent evt) -> { if (evt.getKey().equals(UserPreferences.RESULTS_TABLE_PAGE_SIZE)) { - int newPageSize = UserPreferences.getResultsTablePageSize(); - nodeNameToPageCountListenerMap.values().forEach((ps) -> { ps.postPageSizeChangeEvent(); }); - -// try { -// if (this.searchResultManager != null) { -// DAOFetcher previousFetcher = this.searchResultManager.getDaoFetcher(); -// this.searchResultManager = new SearchManager(previousFetcher, newPageSize); -// displaySearchResults(this.searchResultManager.getResults(), false); -// } -// } catch (IllegalArgumentException | ExecutionException ex) { -// logger.log(Level.WARNING, "There was an error while updating page size", ex); -// } } }; @@ -162,10 +150,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C private final PropertyChangeListener caseEventListener = evt -> { String evtName = evt.getPropertyName(); - if (Case.Events.DATA_SOURCE_ADDED.toString().equals(evtName)) { - // GVDTODO could potentially be removed -// refreshSearchResultChildren(); - } else if (Case.Events.CURRENT_CASE.toString().equals(evtName) && evt.getNewValue() == null) { + if (Case.Events.CURRENT_CASE.toString().equals(evtName) && evt.getNewValue() == null) { nodeNameToPageCountListenerMap.clear(); } }; @@ -191,21 +176,6 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C private final PropertyChangeListener weakDAOListener = WeakListeners.propertyChange(DAOListener, mainDAO); - private static final Set INGEST_JOB_EVENTS = EnumSet.of( - IngestManager.IngestJobEvent.COMPLETED, - IngestManager.IngestJobEvent.CANCELLED); - - private final PropertyChangeListener ingestJobListener = (PropertyChangeEvent evt) -> { - // GVDTODO could potentially be removed -// String eventType = evt.getPropertyName(); -// if 
(eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString()) -// || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) { -// refreshSearchResultChildren(); -// } - }; - - private final PropertyChangeListener weakIngestJobListener = WeakListeners.propertyChange(ingestJobListener, null); - /** * Creates and opens a Swing JPanel with a JTabbedPane child component that * contains instances of the result viewers (DataResultViewer) provided by @@ -471,7 +441,6 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C Case.addEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, this.weakCaseEventListener); this.mainDAO.addPropertyChangeListener(this.weakDAOListener); IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS, this.weakDAOListener); - IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS, weakIngestJobListener); } /** @@ -481,7 +450,6 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C UserPreferences.removeChangeListener(this.pageSizeListener); Case.removeEventTypeSubscriber(EnumSet.of(Case.Events.CURRENT_CASE), this.weakCaseEventListener); this.mainDAO.removePropertyChangeListener(this.weakDAOListener); - IngestManager.getInstance().removeIngestJobEventListener(INGEST_JOB_EVENTS, weakIngestJobListener); } /** diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index 6a263aa1e7..403494b102 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -24,7 +24,6 @@ import java.beans.PropertyChangeEvent; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; @@ -32,11 +31,12 @@ import java.util.List; import 
java.util.Map; import java.util.Map.Entry; import java.util.Set; +import java.util.concurrent.ConcurrentMap; import java.util.stream.Collectors; import org.sleuthkit.autopsy.coreutils.Logger; import java.util.concurrent.ExecutionException; +import java.util.logging.Level; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; -import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; @@ -49,16 +49,16 @@ import org.sleuthkit.datamodel.TskCoreException; * DAO for providing data about data artifacts to populate the results viewer. */ public class DataArtifactDAO extends BlackboardArtifactDAO { - + private static Logger logger = Logger.getLogger(DataArtifactDAO.class.getName()); - + private static DataArtifactDAO instance = null; - + synchronized static DataArtifactDAO getInstance() { if (instance == null) { instance = new DataArtifactDAO(); } - + return instance; } @@ -68,27 +68,27 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { public static Set getIgnoredTreeTypes() { return BlackboardArtifactDAO.getIgnoredTreeTypes(); } - + private final Cache, DataArtifactTableSearchResultsDTO> dataArtifactCache = CacheBuilder.newBuilder().maximumSize(1000).build(); - + private DataArtifactTableSearchResultsDTO fetchDataArtifactsForTable(SearchParams cacheKey) throws NoCurrentCaseException, TskCoreException { - + SleuthkitCase skCase = getCase(); Blackboard blackboard = skCase.getBlackboard(); BlackboardArtifact.Type artType = cacheKey.getParamData().getArtifactType(); - + String pagedWhereClause = getWhereClause(cacheKey); - + List arts = new ArrayList<>(); arts.addAll(blackboard.getDataArtifactsWhere(pagedWhereClause)); blackboard.loadBlackboardAttributes(arts); - + long totalResultsCount = getTotalResultsCount(cacheKey, arts.size()); - + TableData tableData = createTableData(artType, arts); return new 
DataArtifactTableSearchResultsDTO(artType, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), totalResultsCount); } - + @Override RowDTO createRow(BlackboardArtifact artifact, Content srcContent, Content linkedFile, boolean isTimelineSupported, List cellValues, long id) throws IllegalArgumentException { if (!(artifact instanceof DataArtifact)) { @@ -96,25 +96,25 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { } return new DataArtifactRowDTO((DataArtifact) artifact, srcContent, linkedFile, isTimelineSupported, cellValues, id); } - + public DataArtifactTableSearchResultsDTO getDataArtifactsForTable(DataArtifactSearchParam artifactKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { BlackboardArtifact.Type artType = artifactKey.getArtifactType(); - + if (artType == null || artType.getCategory() != BlackboardArtifact.Category.DATA_ARTIFACT || (artifactKey.getDataSourceId() != null && artifactKey.getDataSourceId() < 0)) { throw new IllegalArgumentException(MessageFormat.format("Illegal data. " + "Artifact type must be non-null and data artifact. Data source id must be null or > 0. " + "Received artifact type: {0}; data source id: {1}", artType, artifactKey.getDataSourceId() == null ? 
"" : artifactKey.getDataSourceId())); } - + SearchParams searchParams = new SearchParams<>(artifactKey, startItem, maxCount); if (hardRefresh) { this.dataArtifactCache.invalidate(searchParams); } - + return dataArtifactCache.get(searchParams, () -> fetchDataArtifactsForTable(searchParams)); } - + public boolean isDataArtifactInvalidating(DataArtifactSearchParam key, DAOEvent eventData) { if (!(eventData instanceof DataArtifactEvent)) { return false; @@ -124,7 +124,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { && (key.getDataSourceId() == null || (key.getDataSourceId() == dataArtEvt.getDataSourceId())); } } - + public void dropDataArtifactCache() { dataArtifactCache.invalidateAll(); } @@ -159,17 +159,17 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { // return results return new TreeResultsDTO<>(treeItemRows); - + } catch (NoCurrentCaseException | TskCoreException ex) { throw new ExecutionException("An error occurred while fetching data artifact counts.", ex); } } - + @Override void clearCaches() { this.dataArtifactCache.invalidateAll(); } - + @Override List handleAutopsyEvent(Collection evts) { // get a grouping of artifacts mapping the artifact type id to data source id. 
@@ -179,25 +179,29 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { .filter(dataEvt -> dataEvt != null) .flatMap(dataEvt -> dataEvt.getArtifacts().stream()) .forEach((art) -> { - artifactTypeDataSourceMap - .computeIfAbsent(art.getArtifactTypeID(), (k) -> new HashSet<>()) - .add(art.getDataSourceObjectID()); + try { + if (BlackboardArtifact.Category.DATA_ARTIFACT.equals(art.getType().getCategory())) { + artifactTypeDataSourceMap + .computeIfAbsent(art.getArtifactTypeID(), (k) -> new HashSet<>()) + .add(art.getDataSourceObjectID()); + } + } catch (TskCoreException ex) { + logger.log(Level.WARNING, "Unable to fetch artifact category for artifact with id: " + art.getId(), ex); + } }); - + // invalidate cache entries that are affected by events - // GVDTODO handle concurrency issues that may arise - List> invalidatedKeys = new ArrayList<>(); - for (SearchParams searchParams : this.dataArtifactCache.asMap().keySet()) { - Set dsIds = artifactTypeDataSourceMap.get(searchParams.getParamData().getArtifactType().getTypeID()); + ConcurrentMap, DataArtifactTableSearchResultsDTO> concurrentMap = this.dataArtifactCache.asMap(); + concurrentMap.forEach((k, v) -> { + Set dsIds = artifactTypeDataSourceMap.get(k.getParamData().getArtifactType().getTypeID()); if (dsIds != null) { - Long searchDsId = searchParams.getParamData().getDataSourceId(); + Long searchDsId = k.getParamData().getDataSourceId(); if (searchDsId == null || dsIds.contains(searchDsId)) { - invalidatedKeys.add(searchParams); + concurrentMap.remove(k); } } - } - this.dataArtifactCache.invalidateAll(invalidatedKeys); - + }); + // gather dao events based on artifacts List toRet = new ArrayList<>(); for (Entry> entry : artifactTypeDataSourceMap.entrySet()) { @@ -223,12 +227,12 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { public DataArtifactFetcher(DataArtifactSearchParam params) { super(params); } - + @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean 
hardRefresh) throws ExecutionException { return MainDAO.getInstance().getDataArtifactsDAO().getDataArtifactsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); } - + @Override public boolean isRefreshRequired(DAOEvent evt) { return DataArtifactDAO.this.isDataArtifactInvalidating(this.getParameters(), evt); From 222e31c4e0f0be58feced977d6676d149c346dc1 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Thu, 11 Nov 2021 11:29:44 -0500 Subject: [PATCH 026/142] views updates --- .../corecomponents/DataResultPanel.java | 4 +- .../mainui/datamodel/AnalysisResultDAO.java | 28 ++++--- .../mainui/datamodel/DAOEventUtils.java | 8 ++ .../datamodel/FileTypeExtensionsEvent.java | 74 +++++++++++++++++++ .../mainui/datamodel/FileTypeMimeEvent.java | 74 +++++++++++++++++++ .../mainui/datamodel/FileTypeSizeEvent.java | 73 ++++++++++++++++++ .../datamodel/FileTypeSizeSearchParams.java | 45 ----------- .../autopsy/mainui/datamodel/ViewsDAO.java | 60 +++++++++++++-- .../mainui/nodes/ViewsTypeFactory.java | 2 +- .../mainui/datamodel/TableSearchTest.java | 10 +-- 10 files changed, 307 insertions(+), 71 deletions(-) create mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeExtensionsEvent.java create mode 100755 Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeMimeEvent.java create mode 100755 Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeSizeEvent.java diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java index 8362870c6b..6fa02f5202 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java @@ -1167,7 +1167,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C void displayAnalysisResult(AnalysisResultSearchParam analysisResultParams) { try { - this.searchResultManager = new SearchManager(new 
AnalysisResultFetcher(analysisResultParams), getPageSize()); + this.searchResultManager = new SearchManager(MainDAO.getInstance().getAnalysisResultDAO().new AnalysisResultFetcher(analysisResultParams), getPageSize()); SearchResultsDTO results = searchResultManager.getResults(); displaySearchResults(results, true); } catch (ExecutionException ex) { @@ -1268,7 +1268,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C */ void displayAnalysisResultSet(AnalysisResultSetSearchParam setKey) { try { - this.searchResultManager = new SearchManager(new AnalysisResultSetFetcher(setKey), getPageSize()); + this.searchResultManager = new SearchManager(MainDAO.getInstance().getAnalysisResultDAO().new AnalysisResultSetFetcher(setKey), getPageSize()); SearchResultsDTO results = searchResultManager.getResults(); displaySearchResults(results, true); } catch (ExecutionException | IllegalArgumentException ex) { diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java index 172134d05a..aeba0be4ae 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java @@ -722,20 +722,24 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { ModuleDataEvent dataEvt = DAOEventUtils.getModuleDataFromEvt(evt); if (dataEvt != null) { for (BlackboardArtifact art : dataEvt.getArtifacts()) { - if (art.getArtifactTypeID() == BlackboardArtifact.Type.TSK_KEYWORD_HIT.getTypeID()) { - // GVDTODO - } else if (art.getArtifactTypeID() == BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT.getTypeID() - || art.getArtifactTypeID() == BlackboardArtifact.Type.TSK_INTERESTING_ARTIFACT_HIT.getTypeID() - || art.getArtifactTypeID() == BlackboardArtifact.Type.TSK_HASHSET_HIT.getTypeID()) { + try { + if (art.getArtifactTypeID() == BlackboardArtifact.Type.TSK_KEYWORD_HIT.getTypeID()) { + 
// GVDTODO + } else if (art.getArtifactTypeID() == BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT.getTypeID() + || art.getArtifactTypeID() == BlackboardArtifact.Type.TSK_INTERESTING_ARTIFACT_HIT.getTypeID() + || art.getArtifactTypeID() == BlackboardArtifact.Type.TSK_HASHSET_HIT.getTypeID()) { - BlackboardAttribute setAttr = art.getAttribute(BlackboardAttribute.Type.TSK_SET_NAME); - String setName = setAttr == null ? null : setAttr.getValueString(); - setMap.computeIfAbsent(Pair.of(art.getArtifactTypeID(), setName), (k) -> new HashSet<>()) - .add(art.getDataSourceObjectID()); + BlackboardAttribute setAttr = art.getAttribute(BlackboardAttribute.Type.TSK_SET_NAME); + String setName = setAttr == null ? null : setAttr.getValueString(); + setMap.computeIfAbsent(Pair.of(art.getArtifactTypeID(), setName), (k) -> new HashSet<>()) + .add(art.getDataSourceObjectID()); - } else if (BlackboardArtifact.Category.DATA_ARTIFACT.equals(art.getType().getCategory())) { - analysisResultMap.computeIfAbsent(art.getArtifactTypeID(), (k) -> new HashSet<>()) - .add(art.getDataSourceObjectID()); + } else if (BlackboardArtifact.Category.DATA_ARTIFACT.equals(art.getType().getCategory())) { + analysisResultMap.computeIfAbsent(art.getArtifactTypeID(), (k) -> new HashSet<>()) + .add(art.getDataSourceObjectID()); + } + } catch (TskCoreException ex) { + logger.log(Level.WARNING, "Unable to fetch necessary information for artifact id: " + art.getId(), ex); } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOEventUtils.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOEventUtils.java index c538ac0268..4cfc743dec 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOEventUtils.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOEventUtils.java @@ -22,6 +22,7 @@ import java.beans.PropertyChangeEvent; import org.sleuthkit.autopsy.ingest.IngestManager; import org.sleuthkit.autopsy.ingest.ModuleContentEvent; import 
org.sleuthkit.autopsy.ingest.ModuleDataEvent; +import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Content; /** @@ -55,6 +56,13 @@ public class DAOEventUtils { } } + static AbstractFile getFileFromEvt(PropertyChangeEvent evt) { + Content content = getContentFromEvt(evt); + return (content instanceof AbstractFile) + ? ((AbstractFile) content) + : null; + } + /** * Returns the ModuleDataEvent in the event if there is a child * ModuleDataEvent. If not, null is returned. diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeExtensionsEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeExtensionsEvent.java new file mode 100644 index 0000000000..f81ae3f303 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeExtensionsEvent.java @@ -0,0 +1,74 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel; + +import java.util.Objects; + +/** + * An event where file type extensions could be affected. + */ +public class FileTypeExtensionsEvent { + + private final FileExtSearchFilter filter; + private final long dataSourceId; + + // TODO: This should ideally take in some kind of ENUM once we redo the tree. 
+ // this assumes that filters implicitly or explicitly implement hashCode and equals to work + FileTypeExtensionsEvent(FileExtSearchFilter filter, long dataSourceId) { + this.filter = filter; + this.dataSourceId = dataSourceId; + } + + public FileExtSearchFilter getFilter() { + return filter; + } + + public long getDataSourceId() { + return dataSourceId; + } + + @Override + public int hashCode() { + int hash = 7; + hash = 31 * hash + Objects.hashCode(this.filter); + hash = 31 * hash + Objects.hashCode(this.dataSourceId); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final FileTypeExtensionsEvent other = (FileTypeExtensionsEvent) obj; + if (!Objects.equals(this.filter, other.filter)) { + return false; + } + if (!Objects.equals(this.dataSourceId, other.dataSourceId)) { + return false; + } + return true; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeMimeEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeMimeEvent.java new file mode 100755 index 0000000000..b135b46322 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeMimeEvent.java @@ -0,0 +1,74 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.mainui.datamodel; + +import java.util.Objects; + +/** + * An event pertaining to MIME types view from the DAO. + */ +public class FileTypeMimeEvent { + + private final String mimeType; + private final long dataSourceId; + + FileTypeMimeEvent(String mimeType, long dataSourceId) { + this.mimeType = mimeType; + this.dataSourceId = dataSourceId; + } + + public String getMimeType() { + return mimeType; + } + + public long getDataSourceId() { + return dataSourceId; + } + + @Override + public int hashCode() { + int hash = 7; + hash = 29 * hash + Objects.hashCode(this.mimeType); + hash = 29 * hash + Objects.hashCode(this.dataSourceId); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final FileTypeMimeEvent other = (FileTypeMimeEvent) obj; + if (!Objects.equals(this.mimeType, other.mimeType)) { + return false; + } + if (!Objects.equals(this.dataSourceId, other.dataSourceId)) { + return false; + } + return true; + } + + +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeSizeEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeSizeEvent.java new file mode 100755 index 0000000000..0ae3354516 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeSizeEvent.java @@ -0,0 +1,73 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel; + +import java.util.Objects; + +/** + * Key for accessing data about file sizeFilter from the DAO. + */ +public class FileTypeSizeEvent { + + private final FileSizeFilter sizeFilter; + private final Long dataSourceId; + + FileTypeSizeEvent(FileSizeFilter sizeFilter, Long dataSourceId) { + this.sizeFilter = sizeFilter; + this.dataSourceId = dataSourceId; + } + + public FileSizeFilter getSizeFilter() { + return sizeFilter; + } + + public Long getDataSourceId() { + return dataSourceId; + } + + @Override + public int hashCode() { + int hash = 7; + hash = 53 * hash + Objects.hashCode(this.sizeFilter); + hash = 53 * hash + Objects.hashCode(this.dataSourceId); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final FileTypeSizeEvent other = (FileTypeSizeEvent) obj; + if (this.sizeFilter != other.sizeFilter) { + return false; + } + if (!Objects.equals(this.dataSourceId, other.dataSourceId)) { + return false; + } + return true; + } + +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeSizeSearchParams.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeSizeSearchParams.java index c9bc49c5e6..1a1eb91917 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeSizeSearchParams.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeSizeSearchParams.java @@ -25,51 +25,6 @@ import 
java.util.Objects; */ public class FileTypeSizeSearchParams { - public enum FileSizeFilter { - SIZE_50_200(0, "SIZE_50_200", "50 - 200MB", 50_000_000L, 200_000_000L), //NON-NLS - SIZE_200_1000(1, "SIZE_200_1GB", "200MB - 1GB", 200_000_000L, 1_000_000_000L), //NON-NLS - SIZE_1000_(2, "SIZE_1000+", "1GB+", 1_000_000_000L, null); //NON-NLS - private final int id; - private final String name; - private final String displayName; - private long minBound; - private Long maxBound; - - private FileSizeFilter(int id, String name, String displayName, long minBound, Long maxBound) { - this.id = id; - this.name = name; - this.displayName = displayName; - this.minBound = minBound; - this.maxBound = maxBound; - } - - public String getName() { - return this.name; - } - - public int getId() { - return this.id; - } - - public String getDisplayName() { - return this.displayName; - } - - /** - * @return The minimum inclusive bound (non-null). - */ - public long getMinBound() { - return minBound; - } - - /** - * @return The maximum exclusive bound (if null, no upper limit). 
- */ - public Long getMaxBound() { - return maxBound; - } - - } private final FileSizeFilter sizeFilter; private final Long dataSourceId; diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java index 1d8ea05ce5..1f9c0e3828 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java @@ -20,13 +20,16 @@ package org.sleuthkit.autopsy.mainui.datamodel; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; +import java.beans.PropertyChangeEvent; import java.sql.SQLException; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.logging.Level; @@ -285,7 +288,7 @@ public class ViewsDAO extends AbstractDAO { * * @return The clause to be proceeded with 'where' or 'and'. */ - private static String getFileSizeClause(FileTypeSizeSearchParams.FileSizeFilter filter) { + private static String getFileSizeClause(FileSizeFilter filter) { return filter.getMaxBound() == null ? "(size >= " + filter.getMinBound() + ")" : "(size >= " + filter.getMinBound() + " AND size < " + filter.getMaxBound() + ")"; @@ -313,7 +316,7 @@ public class ViewsDAO extends AbstractDAO { * * @return The clause to be proceeded with 'where' or 'and'. 
*/ - private String getFileSizesWhereStatement(FileTypeSizeSearchParams.FileSizeFilter filter, Long dataSourceId) { + private String getFileSizesWhereStatement(FileSizeFilter filter, Long dataSourceId) { String query = getBaseFileSizeFilter() + " AND " + getFileSizeClause(filter) + getDataSourceAndClause(dataSourceId); @@ -369,12 +372,12 @@ public class ViewsDAO extends AbstractDAO { * @throws ExecutionException */ public TreeResultsDTO getFileSizeCounts(Long dataSourceId) throws IllegalArgumentException, ExecutionException { - Map whereClauses = Stream.of(FileTypeSizeSearchParams.FileSizeFilter.values()) + Map whereClauses = Stream.of(FileSizeFilter.values()) .collect(Collectors.toMap( filter -> filter, filter -> getFileSizeClause(filter))); - Map countsByFilter = getFilesCounts(whereClauses, getBaseFileSizeFilter(), dataSourceId, true); + Map countsByFilter = getFilesCounts(whereClauses, getBaseFileSizeFilter(), dataSourceId, true); List> treeList = countsByFilter.entrySet().stream() .map(entry -> { @@ -596,7 +599,7 @@ public class ViewsDAO extends AbstractDAO { return fetchFileViewFiles(whereStatement, MIME_TYPE_DISPLAY_NAME, startItem, maxResultCount); } - private SearchResultsDTO fetchSizeSearchResultsDTOs(FileTypeSizeSearchParams.FileSizeFilter filter, Long dataSourceId, long startItem, Long maxResultCount) throws NoCurrentCaseException, TskCoreException { + private SearchResultsDTO fetchSizeSearchResultsDTOs(FileSizeFilter filter, Long dataSourceId, long startItem, Long maxResultCount) throws NoCurrentCaseException, TskCoreException { String whereStatement = getFileSizesWhereStatement(filter, dataSourceId); return fetchFileViewFiles(whereStatement, filter.getDisplayName(), startItem, maxResultCount); } @@ -641,6 +644,51 @@ public class ViewsDAO extends AbstractDAO { return new BaseSearchResultsDTO(FILE_VIEW_EXT_TYPE_ID, displayName, FileSystemColumnUtils.getColumnKeysForAbstractfile(), fileRows, startItem, totalResultsCount); } + @Override + void 
clearCaches() { + this.searchParamsCache.invalidateAll(); + } + + + @Override + List handleAutopsyEvent(Collection autopsyEvts) { + Map> fileExtensionDsMap = new HashMap<>(); + Map> mimeTypeDsMap = new HashMap<>(); + Map> fileSizeDsMap = new HashMap<>(); + + for (PropertyChangeEvent evt : autopsyEvts) { + AbstractFile af = DAOEventUtils.getFileFromEvt(evt); + if (af == null) { + continue; + } + + if (!StringUtils.isBlank(af.getNameExtension())) { + fileExtensionDsMap + .computeIfAbsent(af.getNameExtension(), (k) -> new HashSet<>()) + .add(af.getDataSourceObjectId()); + } + + if (!StringUtils.isBlank(af.getMIMEType())) { + mimeTypeDsMap + .computeIfAbsent(af.getMIMEType(), (k) -> new HashSet<>()) + .add(af.getDataSourceObjectId()); + } + + FileSizeFilter sizeFilter = Stream.of(FileSizeFilter.values()) + .filter(filter -> af.getSize() >= filter.getMinBound() && af.getSize() < filter.getMaxBound()) + .findFirst() + .orElse(null); + + if (sizeFilter != null) { + fileSizeDsMap + .computeIfAbsent(sizeFilter, (k) -> new HashSet<>()) + .add(af.getDataSourceObjectId()); + } + } + + + } + /** * Handles fetching and paging of data for file types by extension. 
*/ @@ -727,7 +775,7 @@ public class ViewsDAO extends AbstractDAO { @Override public boolean isRefreshRequired(DAOEvent evt) { return true; - + // GVDTODO // Content content = DAOEventUtils.getContentFromEvt(evt); // if (content == null) { diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java index a608827b24..215041601e 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java @@ -33,7 +33,7 @@ import org.sleuthkit.autopsy.mainui.datamodel.FileExtSearchFilter; import org.sleuthkit.autopsy.mainui.datamodel.FileTypeExtensionsSearchParams; import org.sleuthkit.autopsy.mainui.datamodel.FileTypeMimeSearchParams; import org.sleuthkit.autopsy.mainui.datamodel.FileTypeSizeSearchParams; -import org.sleuthkit.autopsy.mainui.datamodel.FileTypeSizeSearchParams.FileSizeFilter; +import org.sleuthkit.autopsy.mainui.datamodel.FileSizeFilter; import org.sleuthkit.autopsy.mainui.datamodel.MainDAO; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO; import org.sleuthkit.datamodel.AbstractFile; diff --git a/Core/test/qa-functional/src/org/sleuthkit/autopsy/mainui/datamodel/TableSearchTest.java b/Core/test/qa-functional/src/org/sleuthkit/autopsy/mainui/datamodel/TableSearchTest.java index a96aac8bfb..ab057fe56e 100644 --- a/Core/test/qa-functional/src/org/sleuthkit/autopsy/mainui/datamodel/TableSearchTest.java +++ b/Core/test/qa-functional/src/org/sleuthkit/autopsy/mainui/datamodel/TableSearchTest.java @@ -672,31 +672,31 @@ public class TableSearchTest extends NbTestCase { ViewsDAO viewsDAO = MainDAO.getInstance().getViewsDAO(); // Get "50 - 200MB" files from data source 1 - FileTypeSizeSearchParams param = new FileTypeSizeSearchParams(FileTypeSizeSearchParams.FileSizeFilter.SIZE_50_200, dataSource1.getId()); + FileTypeSizeSearchParams param = new 
FileTypeSizeSearchParams(FileSizeFilter.SIZE_50_200, dataSource1.getId()); SearchResultsDTO results = viewsDAO.getFilesBySize(param, 0, null, false); assertEquals(2, results.getTotalResultsCount()); assertEquals(2, results.getItems().size()); // Get "200MB - 1GB" files from data source 1 - param = new FileTypeSizeSearchParams(FileTypeSizeSearchParams.FileSizeFilter.SIZE_200_1000, dataSource1.getId()); + param = new FileTypeSizeSearchParams(FileSizeFilter.SIZE_200_1000, dataSource1.getId()); results = viewsDAO.getFilesBySize(param, 0, null, false); assertEquals(0, results.getTotalResultsCount()); assertEquals(0, results.getItems().size()); // Get "200MB - 1GB" files from data source 2 - param = new FileTypeSizeSearchParams(FileTypeSizeSearchParams.FileSizeFilter.SIZE_200_1000, dataSource2.getId()); + param = new FileTypeSizeSearchParams(FileSizeFilter.SIZE_200_1000, dataSource2.getId()); results = viewsDAO.getFilesBySize(param, 0, null, false); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); // Get "1GB+" files from all data sources - param = new FileTypeSizeSearchParams(FileTypeSizeSearchParams.FileSizeFilter.SIZE_1000_, null); + param = new FileTypeSizeSearchParams(FileSizeFilter.SIZE_1000_, null); results = viewsDAO.getFilesBySize(param, 0, null, false); assertEquals(0, results.getTotalResultsCount()); assertEquals(0, results.getItems().size()); // Get "50 - 200MB" files from all data sources - param = new FileTypeSizeSearchParams(FileTypeSizeSearchParams.FileSizeFilter.SIZE_50_200, null); + param = new FileTypeSizeSearchParams(FileSizeFilter.SIZE_50_200, null); results = viewsDAO.getFilesBySize(param, 0, null, false); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); From 401b42623c35b14126dfea337201c10d2c1d7ce6 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Thu, 11 Nov 2021 11:29:57 -0500 Subject: [PATCH 027/142] extract file size filter --- 
.../mainui/datamodel/FileSizeFilter.java | 69 +++++++++++++++++++ 1 file changed, 69 insertions(+) create mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSizeFilter.java diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSizeFilter.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSizeFilter.java new file mode 100644 index 0000000000..246875fffb --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSizeFilter.java @@ -0,0 +1,69 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel; + +/** + * Filters by file size for views. 
+ */ +public enum FileSizeFilter { + SIZE_50_200(0, "SIZE_50_200", "50 - 200MB", 50_000_000L, 200_000_000L), //NON-NLS + SIZE_200_1000(1, "SIZE_200_1GB", "200MB - 1GB", 200_000_000L, 1_000_000_000L), //NON-NLS + SIZE_1000_(2, "SIZE_1000+", "1GB+", 1_000_000_000L, null); + //NON-NLS + private final int id; + private final String name; + private final String displayName; + private long minBound; + private Long maxBound; + + private FileSizeFilter(int id, String name, String displayName, long minBound, Long maxBound) { + this.id = id; + this.name = name; + this.displayName = displayName; + this.minBound = minBound; + this.maxBound = maxBound; + } + + public String getName() { + return this.name; + } + + public int getId() { + return this.id; + } + + public String getDisplayName() { + return this.displayName; + } + + /** + * @return The minimum inclusive bound (non-null). + */ + public long getMinBound() { + return minBound; + } + + /** + * @return The maximum exclusive bound (if null, no upper limit). 
+ */ + public Long getMaxBound() { + return maxBound; + } + +} From 76a6810799f279b9bd75a1b04c4a5942dfa87f63 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Thu, 11 Nov 2021 11:30:44 -0500 Subject: [PATCH 028/142] additions --- .../autopsy/mainui/datamodel/ViewsDAO.java | 31 ++++++++++++++++++- 1 file changed, 30 insertions(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java index 1f9c0e3828..da9da21cbe 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java @@ -30,6 +30,7 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; +import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.logging.Level; @@ -686,7 +687,35 @@ public class ViewsDAO extends AbstractDAO { } } - + // invalidate cache entries that are affected by events + ConcurrentMap, SearchResultsDTO> concurrentMap = this.searchParamsCache.asMap(); + concurrentMap.forEach((k, v) -> { + Object baseParams = k.getParamData(); + if (baseParams instanceof FileTypeExtensionsSearchParams) { + FileTypeExtensionsSearchParams extParams = (FileTypeExtensionsSearchParams) baseParams; + + + } else if (baseParams instanceof FileTypeMimeSearchParams) { + FileTypeMimeSearchParams mimeParams = (FileTypeMimeSearchParams) baseParams; + + + } else if (baseParams instanceof FileTypeSizeSearchParams) { + FileTypeSizeSearchParams sizeParams = (FileTypeSizeSearchParams) baseParams; + + + } + + + + + Set dsIds = artifactTypeDataSourceMap.get(k.getParamData().getArtifactType().getTypeID()); + if (dsIds != null) { + Long searchDsId = k.getParamData().getDataSourceId(); + if (searchDsId == null || dsIds.contains(searchDsId)) { + concurrentMap.remove(k); + } + } + }); } /** From 
aa7d009a56f6967e138cae0713b0872e290d16be Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Thu, 11 Nov 2021 11:34:32 -0500 Subject: [PATCH 029/142] move packages --- .../sleuthkit/autopsy/corecomponents/DataResultPanel.java | 4 ++-- .../org/sleuthkit/autopsy/mainui/datamodel/AbstractDao.java | 1 + .../autopsy/mainui/datamodel/AnalysisResultDAO.java | 1 + .../sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java | 3 +++ .../src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java | 3 +++ .../sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java | 1 + .../src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java | 1 + .../org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java | 1 + .../datamodel/{ => events}/BlackboardArtifactEvent.java | 2 +- .../mainui/datamodel/{ => events}/DAOAggregateEvent.java | 4 ++-- .../autopsy/mainui/datamodel/{ => events}/DAOEvent.java | 2 +- .../mainui/datamodel/{ => events}/DAOEventBatcher.java | 6 +++--- .../mainui/datamodel/{ => events}/DAOEventUtils.java | 6 +++--- .../mainui/datamodel/{ => events}/DataArtifactEvent.java | 4 ++-- Core/src/org/sleuthkit/autopsy/mainui/nodes/DAOFetcher.java | 2 +- .../org/sleuthkit/autopsy/mainui/nodes/SearchManager.java | 2 +- 16 files changed, 27 insertions(+), 16 deletions(-) rename Core/src/org/sleuthkit/autopsy/mainui/datamodel/{ => events}/BlackboardArtifactEvent.java (97%) rename Core/src/org/sleuthkit/autopsy/mainui/datamodel/{ => events}/DAOAggregateEvent.java (91%) rename Core/src/org/sleuthkit/autopsy/mainui/datamodel/{ => events}/DAOEvent.java (93%) rename Core/src/org/sleuthkit/autopsy/mainui/datamodel/{ => events}/DAOEventBatcher.java (95%) rename Core/src/org/sleuthkit/autopsy/mainui/datamodel/{ => events}/DAOEventUtils.java (92%) rename Core/src/org/sleuthkit/autopsy/mainui/datamodel/{ => events}/DataArtifactEvent.java (87%) diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java index 
8362870c6b..e47dadde1f 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java @@ -68,8 +68,8 @@ import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultDAO.AnalysisResultSe import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultDAO.KeywordHitResultFetcher; import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSetSearchParam; -import org.sleuthkit.autopsy.mainui.datamodel.DAOAggregateEvent; -import org.sleuthkit.autopsy.mainui.datamodel.DAOEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOAggregateEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactDAO; import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactDAO.DataArtifactFetcher; import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactSearchParam; diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDao.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDao.java index e18a2bc644..cc12d700df 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDao.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDao.java @@ -18,6 +18,7 @@ */ package org.sleuthkit.autopsy.mainui.datamodel; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; import java.beans.PropertyChangeEvent; import java.util.Collection; import java.util.List; diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java index f259f0b7e4..11f4b8620c 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java @@ -18,6 +18,7 @@ */ package org.sleuthkit.autopsy.mainui.datamodel; +import 
org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import java.beans.PropertyChangeEvent; diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index 403494b102..899e434a78 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -18,6 +18,9 @@ */ package org.sleuthkit.autopsy.mainui.datamodel; +import org.sleuthkit.autopsy.mainui.datamodel.events.DataArtifactEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventUtils; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import java.beans.PropertyChangeEvent; diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java index 72a98fb76c..1b684e99a5 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java @@ -18,6 +18,9 @@ */ package org.sleuthkit.autopsy.mainui.datamodel; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOAggregateEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventBatcher; import com.google.common.collect.ImmutableList; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java index a4f23ad9a0..9b4b2372a3 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java @@ -18,6 +18,7 @@ */ package 
org.sleuthkit.autopsy.mainui.datamodel; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import java.beans.PropertyChangeEvent; diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java index cfacaaca1b..bef3d85f85 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java @@ -18,6 +18,7 @@ */ package org.sleuthkit.autopsy.mainui.datamodel; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import java.beans.PropertyChangeEvent; diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java index 454cf6ea27..cd659f9c63 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java @@ -18,6 +18,7 @@ */ package org.sleuthkit.autopsy.mainui.datamodel; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import java.beans.PropertyChangeEvent; diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BlackboardArtifactEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/BlackboardArtifactEvent.java similarity index 97% rename from Core/src/org/sleuthkit/autopsy/mainui/datamodel/BlackboardArtifactEvent.java rename to Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/BlackboardArtifactEvent.java index 05f331fbc2..02fb5229c4 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BlackboardArtifactEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/BlackboardArtifactEvent.java @@ -16,7 +16,7 @@ * See the License for the specific 
language governing permissions and * limitations under the License. */ -package org.sleuthkit.autopsy.mainui.datamodel; +package org.sleuthkit.autopsy.mainui.datamodel.events; /** * An event for an artifact added in a particular type. diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOAggregateEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOAggregateEvent.java similarity index 91% rename from Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOAggregateEvent.java rename to Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOAggregateEvent.java index 78ed63ed05..329a3087bc 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOAggregateEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOAggregateEvent.java @@ -16,7 +16,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.sleuthkit.autopsy.mainui.datamodel; +package org.sleuthkit.autopsy.mainui.datamodel.events; import java.util.List; import org.apache.commons.collections4.list.UnmodifiableList; @@ -33,7 +33,7 @@ public class DAOAggregateEvent { * * @param objects The list of events in this aggregate event. */ - DAOAggregateEvent(List objects) { + public DAOAggregateEvent(List objects) { this.objects = UnmodifiableList.unmodifiableList(objects); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEvent.java similarity index 93% rename from Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOEvent.java rename to Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEvent.java index 7a63cb70af..e2886275b6 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEvent.java @@ -16,7 +16,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.sleuthkit.autopsy.mainui.datamodel; +package org.sleuthkit.autopsy.mainui.datamodel.events; /** * An event emitted by the DAO. diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOEventBatcher.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventBatcher.java similarity index 95% rename from Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOEventBatcher.java rename to Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventBatcher.java index 3f98b8c08a..69e472762e 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOEventBatcher.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventBatcher.java @@ -16,7 +16,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.sleuthkit.autopsy.mainui.datamodel; +package org.sleuthkit.autopsy.mainui.datamodel.events; import com.google.common.util.concurrent.ThreadFactoryBuilder; import java.util.ArrayList; @@ -29,7 +29,7 @@ import java.util.concurrent.TimeUnit; * * Handles refreshes in DAOs based on incoming events handling throttles */ -class DAOEventBatcher { +public class DAOEventBatcher { /** * The Refresher interface needs to be implemented by ChildFactory instances @@ -65,7 +65,7 @@ class DAOEventBatcher { * Queues an event to be fired as a part of a time-windowed batch. * @param event The event. 
*/ - void queueEvent(T event) { + public void queueEvent(T event) { synchronized (this.eventListLock) { this.aggregateEvents.add(event); if (!this.isRunning) { diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOEventUtils.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventUtils.java similarity index 92% rename from Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOEventUtils.java rename to Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventUtils.java index c538ac0268..1566a93f63 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DAOEventUtils.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventUtils.java @@ -16,7 +16,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.sleuthkit.autopsy.mainui.datamodel; +package org.sleuthkit.autopsy.mainui.datamodel.events; import java.beans.PropertyChangeEvent; import org.sleuthkit.autopsy.ingest.IngestManager; @@ -39,7 +39,7 @@ public class DAOEventUtils { * * @return The inner content or null if no content. */ - static Content getContentFromEvt(PropertyChangeEvent evt) { + public static Content getContentFromEvt(PropertyChangeEvent evt) { String eventName = evt.getPropertyName(); if (IngestManager.IngestModuleEvent.CONTENT_CHANGED.toString().equals(eventName) && (evt.getOldValue() instanceof ModuleContentEvent) @@ -63,7 +63,7 @@ public class DAOEventUtils { * * @return The inner ModuleDataEvent or null. 
*/ - static ModuleDataEvent getModuleDataFromEvt(PropertyChangeEvent evt) { + public static ModuleDataEvent getModuleDataFromEvt(PropertyChangeEvent evt) { String eventName = evt.getPropertyName(); if (IngestManager.IngestModuleEvent.DATA_ADDED.toString().equals(eventName) && (evt.getOldValue() instanceof ModuleDataEvent)) { diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DataArtifactEvent.java similarity index 87% rename from Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactEvent.java rename to Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DataArtifactEvent.java index 3403d9e3e6..18537de574 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DataArtifactEvent.java @@ -16,7 +16,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.sleuthkit.autopsy.mainui.datamodel; +package org.sleuthkit.autopsy.mainui.datamodel.events; /** * An event for an artifact added or changed of a particular type possibly for a @@ -24,7 +24,7 @@ package org.sleuthkit.autopsy.mainui.datamodel; */ public class DataArtifactEvent extends BlackboardArtifactEvent { - DataArtifactEvent(long artifactTypeId, long dataSourceId) { + public DataArtifactEvent(long artifactTypeId, long dataSourceId) { super(artifactTypeId, dataSourceId); } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DAOFetcher.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DAOFetcher.java index 43552ee959..9dd06f4428 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DAOFetcher.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DAOFetcher.java @@ -20,7 +20,7 @@ package org.sleuthkit.autopsy.mainui.nodes; import java.beans.PropertyChangeEvent; import java.util.concurrent.ExecutionException; -import 
org.sleuthkit.autopsy.mainui.datamodel.DAOEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; import org.sleuthkit.autopsy.mainui.datamodel.SearchResultsDTO; /** diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/SearchManager.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/SearchManager.java index bf55366bbb..951c7979c8 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/SearchManager.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/SearchManager.java @@ -20,7 +20,7 @@ package org.sleuthkit.autopsy.mainui.nodes; import java.text.MessageFormat; import java.util.concurrent.ExecutionException; -import org.sleuthkit.autopsy.mainui.datamodel.DAOEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; import org.sleuthkit.autopsy.mainui.datamodel.SearchResultsDTO; /** From fa257118105bf9a68344944d9f55c0a487492138 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Thu, 11 Nov 2021 13:52:39 -0500 Subject: [PATCH 030/142] views updates --- .../mainui/datamodel/AnalysisResultDAO.java | 3 + .../autopsy/mainui/datamodel/ViewsDAO.java | 86 +++++++++++++++---- .../{ => events}/AnalysisResultEvent.java | 4 +- .../{ => events}/AnalysisResultSetEvent.java | 2 +- .../datamodel/events/DAOEventUtils.java | 2 +- .../{ => events}/FileTypeExtensionsEvent.java | 26 +++--- .../{ => events}/FileTypeMimeEvent.java | 6 +- .../{ => events}/FileTypeSizeEvent.java | 7 +- .../{ => events}/KeywordHitEvent.java | 4 +- 9 files changed, 96 insertions(+), 44 deletions(-) rename Core/src/org/sleuthkit/autopsy/mainui/datamodel/{ => events}/AnalysisResultEvent.java (87%) rename Core/src/org/sleuthkit/autopsy/mainui/datamodel/{ => events}/AnalysisResultSetEvent.java (95%) rename Core/src/org/sleuthkit/autopsy/mainui/datamodel/{ => events}/FileTypeExtensionsEvent.java (66%) rename Core/src/org/sleuthkit/autopsy/mainui/datamodel/{ => events}/FileTypeMimeEvent.java (91%) rename Core/src/org/sleuthkit/autopsy/mainui/datamodel/{ => 
events}/FileTypeSizeEvent.java (88%) rename Core/src/org/sleuthkit/autopsy/mainui/datamodel/{ => events}/KeywordHitEvent.java (87%) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java index f56153cbea..3468ff1544 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java @@ -18,6 +18,8 @@ */ package org.sleuthkit.autopsy.mainui.datamodel; +import org.sleuthkit.autopsy.mainui.datamodel.events.AnalysisResultSetEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.AnalysisResultEvent; import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; @@ -44,6 +46,7 @@ import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventUtils; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.AnalysisResult; diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java index 02e4fcd4e1..7179a1f76d 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java @@ -39,6 +39,7 @@ import java.util.logging.Logger; import java.util.stream.Collectors; import java.util.stream.Stream; import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.tuple.Pair; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; @@ -47,6 +48,10 
@@ import static org.sleuthkit.autopsy.core.UserPreferences.hideSlackFilesInViewsTr import org.sleuthkit.autopsy.datamodel.FileTypeExtensions; import org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO.ExtensionMediaType; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventUtils; +import org.sleuthkit.autopsy.mainui.datamodel.events.FileTypeExtensionsEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.FileTypeMimeEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.FileTypeSizeEvent; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.CaseDbAccessManager.CaseDbPreparedStatement; @@ -651,11 +656,17 @@ public class ViewsDAO extends AbstractDAO { this.searchParamsCache.invalidateAll(); } + private Pair getMimePieces(String mimeType) { + int idx = mimeType.indexOf("/"); + String mimePrefix = idx > 0 ? mimeType.substring(0, idx) : mimeType; + String mimeSuffix = idx > 0 ? 
mimeType.substring(idx + 1) : null; + return Pair.of(mimePrefix, mimeSuffix); + } @Override List handleAutopsyEvent(Collection autopsyEvts) { Map> fileExtensionDsMap = new HashMap<>(); - Map> mimeTypeDsMap = new HashMap<>(); + Map>> mimeTypeDsMap = new HashMap<>(); Map> fileSizeDsMap = new HashMap<>(); for (PropertyChangeEvent evt : autopsyEvts) { @@ -671,8 +682,10 @@ public class ViewsDAO extends AbstractDAO { } if (!StringUtils.isBlank(af.getMIMEType())) { + Pair mimePieces = getMimePieces(af.getMIMEType()); mimeTypeDsMap - .computeIfAbsent(af.getMIMEType(), (k) -> new HashSet<>()) + .computeIfAbsent(mimePieces.getKey(), (k) -> new HashMap<>()) + .computeIfAbsent(mimePieces.getValue(), (k) -> new HashSet<>()) .add(af.getDataSourceObjectId()); } @@ -680,43 +693,78 @@ public class ViewsDAO extends AbstractDAO { .filter(filter -> af.getSize() >= filter.getMinBound() && af.getSize() < filter.getMaxBound()) .findFirst() .orElse(null); - + if (sizeFilter != null) { fileSizeDsMap .computeIfAbsent(sizeFilter, (k) -> new HashSet<>()) .add(af.getDataSourceObjectId()); } } - + // invalidate cache entries that are affected by events ConcurrentMap, SearchResultsDTO> concurrentMap = this.searchParamsCache.asMap(); concurrentMap.forEach((k, v) -> { Object baseParams = k.getParamData(); if (baseParams instanceof FileTypeExtensionsSearchParams) { FileTypeExtensionsSearchParams extParams = (FileTypeExtensionsSearchParams) baseParams; - - + boolean isMatch = extParams.getFilter().getFilter().stream().anyMatch((ext) -> { + Set dsIds = fileExtensionDsMap.get(ext); + return (dsIds != null && (extParams.getDataSourceId() == null || dsIds.contains(extParams.getDataSourceId()))); + }); + + if (isMatch) { + concurrentMap.remove(k); + } } else if (baseParams instanceof FileTypeMimeSearchParams) { FileTypeMimeSearchParams mimeParams = (FileTypeMimeSearchParams) baseParams; - - + Pair mimePieces = getMimePieces(mimeParams.getMimeType()); + Map> suffixes = 
mimeTypeDsMap.get(mimePieces.getKey()); + if (suffixes == null) { + return; + } + + if (mimePieces.getValue() == null + && (mimeParams.getDataSourceId() == null + || suffixes.values().stream().flatMap(set -> set.stream()).anyMatch(ds -> ds == mimeParams.getDataSourceId()))) { + + concurrentMap.remove(k); + } else { + Set dataSources = suffixes.get(mimePieces.getValue()); + if (dataSources != null && (mimeParams.getDataSourceId() == null || dataSources.contains(mimeParams.getDataSourceId()))) { + concurrentMap.remove(k); + } + } + } else if (baseParams instanceof FileTypeSizeSearchParams) { FileTypeSizeSearchParams sizeParams = (FileTypeSizeSearchParams) baseParams; - - - } - - - - - Set dsIds = artifactTypeDataSourceMap.get(k.getParamData().getArtifactType().getTypeID()); - if (dsIds != null) { - Long searchDsId = k.getParamData().getDataSourceId(); - if (searchDsId == null || dsIds.contains(searchDsId)) { + Set dataSources = fileSizeDsMap.get(sizeParams.getSizeFilter()); + if (dataSources != null && (sizeParams.getDataSourceId() == null || dataSources.contains(sizeParams.getDataSourceId()))) { concurrentMap.remove(k); } } }); + + Stream fileExtStream = fileExtensionDsMap.entrySet().stream() + .flatMap(entry -> entry.getValue().stream().map(dsId -> new FileTypeExtensionsEvent(entry.getKey(), dsId))); + + List fileMimeList = new ArrayList<>(); + for (Entry>> prefixEntry : mimeTypeDsMap.entrySet()) { + String mimePrefix = prefixEntry.getKey(); + for (Entry> suffixEntry : prefixEntry.getValue().entrySet()) { + String mimeSuffix = suffixEntry.getKey(); + for (long dsId : suffixEntry.getValue()) { + String mimeType = mimePrefix + (mimeSuffix == null ? 
"" : ("/" + mimeSuffix)); + fileMimeList.add(new FileTypeMimeEvent(mimeType, dsId)); + } + } + } + + Stream fileSizeStream = fileSizeDsMap.entrySet().stream() + .flatMap(entry -> entry.getValue().stream().map(dsId -> new FileTypeSizeEvent(entry.getKey(), dsId))); + + return Stream.of(fileExtStream, fileMimeList.stream(), fileSizeStream) + .flatMap(stream -> stream) + .collect(Collectors.toList()); } /** diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AnalysisResultEvent.java similarity index 87% rename from Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultEvent.java rename to Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AnalysisResultEvent.java index edda77261f..9599d2d041 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AnalysisResultEvent.java @@ -16,7 +16,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.sleuthkit.autopsy.mainui.datamodel; +package org.sleuthkit.autopsy.mainui.datamodel.events; /** * An event for an artifact added or changed of a particular type possibly for a @@ -24,7 +24,7 @@ package org.sleuthkit.autopsy.mainui.datamodel; */ public class AnalysisResultEvent extends BlackboardArtifactEvent { - AnalysisResultEvent(long artifactTypeId, long dataSourceId) { + public AnalysisResultEvent(long artifactTypeId, long dataSourceId) { super(artifactTypeId, dataSourceId); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultSetEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AnalysisResultSetEvent.java similarity index 95% rename from Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultSetEvent.java rename to Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AnalysisResultSetEvent.java index 453b0df8bd..22a6d7b87c 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultSetEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AnalysisResultSetEvent.java @@ -16,7 +16,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.sleuthkit.autopsy.mainui.datamodel; +package org.sleuthkit.autopsy.mainui.datamodel.events; /** * An event for an artifact added or changed of a particular type possibly for a diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventUtils.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventUtils.java index 96b969333e..c8f6fa5370 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventUtils.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventUtils.java @@ -56,7 +56,7 @@ public class DAOEventUtils { } } - static AbstractFile getFileFromEvt(PropertyChangeEvent evt) { + public static AbstractFile getFileFromEvt(PropertyChangeEvent evt) { Content content = getContentFromEvt(evt); return (content instanceof AbstractFile) ? ((AbstractFile) content) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeExtensionsEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeExtensionsEvent.java similarity index 66% rename from Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeExtensionsEvent.java rename to Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeExtensionsEvent.java index f81ae3f303..4bc7db27f4 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeExtensionsEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeExtensionsEvent.java @@ -16,27 +16,25 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.sleuthkit.autopsy.mainui.datamodel; +package org.sleuthkit.autopsy.mainui.datamodel.events; import java.util.Objects; /** * An event where file type extensions could be affected. 
*/ -public class FileTypeExtensionsEvent { +public class FileTypeExtensionsEvent implements DAOEvent { - private final FileExtSearchFilter filter; + private final String extension; private final long dataSourceId; - // TODO: This should ideally take in some kind of ENUM once we redo the tree. - // this assumes that filters implicitly or explicitly implement hashCode and equals to work - FileTypeExtensionsEvent(FileExtSearchFilter filter, long dataSourceId) { - this.filter = filter; + public FileTypeExtensionsEvent(String extension, long dataSourceId) { + this.extension = extension; this.dataSourceId = dataSourceId; } - public FileExtSearchFilter getFilter() { - return filter; + public String getExtension() { + return extension; } public long getDataSourceId() { @@ -46,8 +44,8 @@ public class FileTypeExtensionsEvent { @Override public int hashCode() { int hash = 7; - hash = 31 * hash + Objects.hashCode(this.filter); - hash = 31 * hash + Objects.hashCode(this.dataSourceId); + hash = 59 * hash + Objects.hashCode(this.extension); + hash = 59 * hash + (int) (this.dataSourceId ^ (this.dataSourceId >>> 32)); return hash; } @@ -63,12 +61,14 @@ public class FileTypeExtensionsEvent { return false; } final FileTypeExtensionsEvent other = (FileTypeExtensionsEvent) obj; - if (!Objects.equals(this.filter, other.filter)) { + if (this.dataSourceId != other.dataSourceId) { return false; } - if (!Objects.equals(this.dataSourceId, other.dataSourceId)) { + if (!Objects.equals(this.extension, other.extension)) { return false; } return true; } + + } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeMimeEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeMimeEvent.java similarity index 91% rename from Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeMimeEvent.java rename to Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeMimeEvent.java index b135b46322..4aae0476b1 100755 --- 
a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeMimeEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeMimeEvent.java @@ -16,19 +16,19 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.sleuthkit.autopsy.mainui.datamodel; +package org.sleuthkit.autopsy.mainui.datamodel.events; import java.util.Objects; /** * An event pertaining to MIME types view from the DAO. */ -public class FileTypeMimeEvent { +public class FileTypeMimeEvent implements DAOEvent { private final String mimeType; private final long dataSourceId; - FileTypeMimeEvent(String mimeType, long dataSourceId) { + public FileTypeMimeEvent(String mimeType, long dataSourceId) { this.mimeType = mimeType; this.dataSourceId = dataSourceId; } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeSizeEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeSizeEvent.java similarity index 88% rename from Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeSizeEvent.java rename to Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeSizeEvent.java index 0ae3354516..d65aeb2788 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeSizeEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeSizeEvent.java @@ -16,19 +16,20 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.sleuthkit.autopsy.mainui.datamodel; +package org.sleuthkit.autopsy.mainui.datamodel.events; import java.util.Objects; +import org.sleuthkit.autopsy.mainui.datamodel.FileSizeFilter; /** * Key for accessing data about file sizeFilter from the DAO. 
*/ -public class FileTypeSizeEvent { +public class FileTypeSizeEvent implements DAOEvent { private final FileSizeFilter sizeFilter; private final Long dataSourceId; - FileTypeSizeEvent(FileSizeFilter sizeFilter, Long dataSourceId) { + public FileTypeSizeEvent(FileSizeFilter sizeFilter, Long dataSourceId) { this.sizeFilter = sizeFilter; this.dataSourceId = dataSourceId; } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/KeywordHitEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/KeywordHitEvent.java similarity index 87% rename from Core/src/org/sleuthkit/autopsy/mainui/datamodel/KeywordHitEvent.java rename to Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/KeywordHitEvent.java index a61feefbda..4d8ce6030e 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/KeywordHitEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/KeywordHitEvent.java @@ -16,7 +16,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.sleuthkit.autopsy.mainui.datamodel; +package org.sleuthkit.autopsy.mainui.datamodel.events; /** * An event for an artifact added or changed of a particular type possibly for a @@ -26,7 +26,7 @@ public class KeywordHitEvent extends AnalysisResultSetEvent { private final String regex; private final String match; - KeywordHitEvent(String regex, String match, String setName, long artifactTypeId, long dataSourceId) { + public KeywordHitEvent(String regex, String match, String setName, long artifactTypeId, long dataSourceId) { super(setName, artifactTypeId, dataSourceId); this.regex = regex; this.match = match; From 2a199a64bbd4e95e6501db4500876e27f39efb44 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Thu, 11 Nov 2021 14:02:12 -0500 Subject: [PATCH 031/142] event type --- .../org/sleuthkit/autopsy/mainui/datamodel/AbstractDao.java | 2 +- .../mainui/datamodel/events/BlackboardArtifactEvent.java | 5 ++++- .../sleuthkit/autopsy/mainui/datamodel/events/DAOEvent.java | 2 ++ 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDao.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDao.java index cc12d700df..c12733d9c0 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDao.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDao.java @@ -29,7 +29,7 @@ import java.util.List; abstract class AbstractDAO { /** - * Clear any cached data (Due to change in view + * Clear any cached data (Due to change in view). 
*/ abstract void clearCaches(); diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/BlackboardArtifactEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/BlackboardArtifactEvent.java index 02fb5229c4..07231ce318 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/BlackboardArtifactEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/BlackboardArtifactEvent.java @@ -67,5 +67,8 @@ public class BlackboardArtifactEvent implements DAOEvent { return true; } - + @Override + public Type getType() { + return Type.RESULT; + } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEvent.java index e2886275b6..c4a1c3a3ca 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEvent.java @@ -22,5 +22,7 @@ package org.sleuthkit.autopsy.mainui.datamodel.events; * An event emitted by the DAO. 
*/ public interface DAOEvent { + public enum Type { TREE, RESULT } + DAOEvent.Type getType(); } From b665a0aacce6b616f81421e92cc980828578f7f4 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Thu, 11 Nov 2021 14:05:43 -0500 Subject: [PATCH 032/142] retrack file --- .../mainui/datamodel/{AbstractDao.java => AbstractDAO.java} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename Core/src/org/sleuthkit/autopsy/mainui/datamodel/{AbstractDao.java => AbstractDAO.java} (100%) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDao.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java similarity index 100% rename from Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDao.java rename to Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java From 5fb6bad1b175ce6124049b4c521887733e60d3c0 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Thu, 11 Nov 2021 14:42:42 -0500 Subject: [PATCH 033/142] OS account work --- .../corecomponents/DataResultPanel.java | 6 +- .../mainui/datamodel/OsAccountsDAO.java | 45 +++++++++--- .../autopsy/mainui/datamodel/TagsDAO.java | 6 -- .../autopsy/mainui/datamodel/ViewsDAO.java | 71 +++++++------------ .../mainui/nodes/ViewsTypeFactory.java | 5 +- 5 files changed, 65 insertions(+), 68 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java index 7f4bae2d5a..0a220f3b64 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java @@ -1188,7 +1188,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C void displayFileExtensions(FileTypeExtensionsSearchParams fileExtensionsParams) { try { - this.searchResultManager = new SearchManager(new FileTypeExtFetcher(fileExtensionsParams), getPageSize()); + this.searchResultManager = new 
SearchManager(MainDAO.getInstance().getViewsDAO().new FileTypeExtFetcher(fileExtensionsParams), getPageSize()); SearchResultsDTO results = searchResultManager.getResults(); displaySearchResults(results, true); } catch (ExecutionException ex) { @@ -1209,7 +1209,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C void displayFileMimes(FileTypeMimeSearchParams fileMimeKey) { try { - this.searchResultManager = new SearchManager(new FileTypeMimeFetcher(fileMimeKey), getPageSize()); + this.searchResultManager = new SearchManager(MainDAO.getInstance().getViewsDAO().new FileTypeMimeFetcher(fileMimeKey), getPageSize()); SearchResultsDTO results = searchResultManager.getResults(); displaySearchResults(results, true); } catch (ExecutionException | IllegalArgumentException ex) { @@ -1248,7 +1248,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C */ void displayFileSizes(FileTypeSizeSearchParams fileSizeKey) { try { - this.searchResultManager = new SearchManager(new FileTypeSizeFetcher(fileSizeKey), getPageSize()); + this.searchResultManager = new SearchManager(MainDAO.getInstance().getViewsDAO().new FileTypeSizeFetcher(fileSizeKey), getPageSize()); SearchResultsDTO results = searchResultManager.getResults(); displaySearchResults(results, true); } catch (ExecutionException | IllegalArgumentException ex) { diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java index 9a96a3757b..7b8237fd65 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java @@ -24,18 +24,22 @@ import com.google.common.cache.CacheBuilder; import java.beans.PropertyChangeEvent; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Comparator; import java.util.List; import java.util.Optional; +import 
java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.Stream; import org.openide.util.NbBundle; import org.openide.util.NbBundle.Messages; +import org.python.google.common.collect.ImmutableSet; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.TimeZoneUtils; +import org.sleuthkit.autopsy.mainui.datamodel.events.OsAccountEvent; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.OsAccount; import org.sleuthkit.datamodel.TskCoreException; @@ -75,6 +79,13 @@ public class OsAccountsDAO extends AbstractDAO { getFileColumnKey(Bundle.OsAccountsDAO_accountRealmNameProperty_displayName()), getFileColumnKey(Bundle.OsAccountsDAO_createdTimeProperty_displayName())); + private static final Set OS_EVENTS = ImmutableSet.of( + Case.Events.OS_ACCOUNTS_ADDED.toString(), + Case.Events.OS_ACCOUNTS_DELETED.toString(), + Case.Events.OS_ACCOUNTS_UPDATED.toString(), + Case.Events.OS_ACCT_INSTANCES_ADDED.toString() + ); + private static OsAccountsDAO instance = null; synchronized static OsAccountsDAO getInstance() { @@ -103,6 +114,10 @@ public class OsAccountsDAO extends AbstractDAO { return searchParamsCache.get(searchParams, () -> fetchAccountsDTOs(searchParams)); } + + public boolean isOSAccountInvalidatingEvt(OsAccountsSearchParams searchParams, DAOEvent evt) { + return DAOEvent instanceof OsAccountEvent; + } /** * Returns a list of paged OS Accounts results. 
@@ -167,10 +182,28 @@ public class OsAccountsDAO extends AbstractDAO { return new BaseSearchResultsDTO(OS_ACCOUNTS_TYPE_ID, Bundle.OsAccounts_name_text(), OS_ACCOUNTS_WITH_SCO_COLUMNS, fileRows, 0, allAccounts.size()); } + @Override + void clearCaches() { + this.searchParamsCache.invalidateAll(); + } + + @Override + List handleAutopsyEvent(Collection evts) { + List daoEvts = evts.stream().filter(evt -> OS_EVENTS.contains(evt.getPropertyName())) + .map(evt -> new OsAccountEvent()) + .collect(Collectors.toList()); + + if (!daoEvts.isEmpty()) { + this.searchParamsCache.invalidateAll(); + } + + return daoEvts; + } + /** * Handles fetching and paging of data for accounts. */ - public static class AccountFetcher extends DAOFetcher { + public class AccountFetcher extends DAOFetcher { /** * Main constructor. @@ -188,15 +221,7 @@ public class OsAccountsDAO extends AbstractDAO { @Override public boolean isRefreshRequired(DAOEvent evt) { - return true; - - //GVDTODO -// String eventType = evt.getPropertyName(); -// if (eventType.equals(Case.Events.OS_ACCOUNTS_ADDED.toString()) -// || eventType.equals(Case.Events.OS_ACCOUNTS_DELETED.toString())) { -// return true; -// } -// return false; + return isOSAccountInvalidatingEvt(this.getParameters(), evt); } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java index 569c73a8e4..cd95d0b9dd 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java @@ -21,7 +21,6 @@ package org.sleuthkit.autopsy.mainui.datamodel; import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; -import java.beans.PropertyChangeEvent; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; @@ -35,13 +34,8 @@ import org.openide.util.NbBundle; import 
org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; -import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagAddedEvent; -import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagDeletedEvent; -import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent; -import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent; import org.sleuthkit.autopsy.core.UserPreferences; import org.sleuthkit.autopsy.coreutils.TimeZoneUtils; -import org.sleuthkit.autopsy.events.AutopsyEvent; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifactTag; diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java index 7179a1f76d..bd1c5bbcc7 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java @@ -163,34 +163,35 @@ public class ViewsDAO extends AbstractDAO { return searchParamsCache.get(searchParams, () -> fetchSizeSearchResultsDTOs(key.getSizeFilter(), key.getDataSourceId(), startItem, maxCount)); } - public boolean isFilesByExtInvalidating(FileTypeExtensionsSearchParams key, Content eventData) { - if (!(eventData instanceof AbstractFile)) { + public boolean isFilesByExtInvalidating(FileTypeExtensionsSearchParams key, DAOEvent eventData) { + if (!(eventData instanceof FileTypeExtensionsEvent)) { return false; } - AbstractFile file = (AbstractFile) eventData; - String extension = "." + file.getNameExtension().toLowerCase(); - return key.getFilter().getFilter().contains(extension); + FileTypeExtensionsEvent extEvt = (FileTypeExtensionsEvent) eventData; + String extension = "." 
+ extEvt.getExtension().toLowerCase(); + return key.getFilter().getFilter().contains(extension) + && (key.getDataSourceId() == null || key.getDataSourceId() == extEvt.getDataSourceId()); } - public boolean isFilesByMimeInvalidating(FileTypeMimeSearchParams key, Content eventData) { - if (!(eventData instanceof AbstractFile)) { + public boolean isFilesByMimeInvalidating(FileTypeMimeSearchParams key, DAOEvent eventData) { + if (!(eventData instanceof FileTypeMimeEvent)) { return false; } - AbstractFile file = (AbstractFile) eventData; - String mimeType = file.getMIMEType(); - return key.getMimeType().equalsIgnoreCase(mimeType); + FileTypeMimeEvent mimeEvt = (FileTypeMimeEvent) eventData; + return mimeEvt.getMimeType().startsWith(key.getMimeType()) + && (key.getDataSourceId() == null || key.getDataSourceId() == mimeEvt.getDataSourceId()); } - public boolean isFilesBySizeInvalidating(FileTypeSizeSearchParams key, Content eventData) { - if (!(eventData instanceof AbstractFile)) { + public boolean isFilesBySizeInvalidating(FileTypeSizeSearchParams key, DAOEvent eventData) { + if (!(eventData instanceof FileTypeSizeEvent)) { return false; } - long size = eventData.getSize(); - - return size >= key.getSizeFilter().getMinBound() && (key.getSizeFilter().getMaxBound() == null || size < key.getSizeFilter().getMaxBound()); + FileTypeSizeEvent sizeEvt = (FileTypeSizeEvent) eventData; + return sizeEvt.getSizeFilter().equals(key.getSizeFilter()) + && (key.getDataSourceId() == null || key.getDataSourceId() == sizeEvt.getDataSourceId()); } /** @@ -746,7 +747,7 @@ public class ViewsDAO extends AbstractDAO { Stream fileExtStream = fileExtensionDsMap.entrySet().stream() .flatMap(entry -> entry.getValue().stream().map(dsId -> new FileTypeExtensionsEvent(entry.getKey(), dsId))); - + List fileMimeList = new ArrayList<>(); for (Entry>> prefixEntry : mimeTypeDsMap.entrySet()) { String mimePrefix = prefixEntry.getKey(); @@ -758,10 +759,10 @@ public class ViewsDAO extends AbstractDAO { } } } 
- + Stream fileSizeStream = fileSizeDsMap.entrySet().stream() .flatMap(entry -> entry.getValue().stream().map(dsId -> new FileTypeSizeEvent(entry.getKey(), dsId))); - + return Stream.of(fileExtStream, fileMimeList.stream(), fileSizeStream) .flatMap(stream -> stream) .collect(Collectors.toList()); @@ -770,7 +771,7 @@ public class ViewsDAO extends AbstractDAO { /** * Handles fetching and paging of data for file types by extension. */ - public static class FileTypeExtFetcher extends DAOFetcher { + public class FileTypeExtFetcher extends DAOFetcher { /** * Main constructor. @@ -788,21 +789,14 @@ public class ViewsDAO extends AbstractDAO { @Override public boolean isRefreshRequired(DAOEvent evt) { - return true; - // GVDTODO -// Content content = DAOEventUtils.getContentFromEvt(evt); -// if (content == null) { -// return false; -// } -// -// return MainDAO.getInstance().getViewsDAO().isFilesByExtInvalidating(this.getParameters(), content); + return isFilesByExtInvalidating(this.getParameters(), evt); } } /** * Handles fetching and paging of data for file types by mime type. */ - public static class FileTypeMimeFetcher extends DAOFetcher { + public class FileTypeMimeFetcher extends DAOFetcher { /** * Main constructor. @@ -820,21 +814,14 @@ public class ViewsDAO extends AbstractDAO { @Override public boolean isRefreshRequired(DAOEvent evt) { - return true; - // GVDTODO -// Content content = DAOEventUtils.getContentFromEvt(evt); -// if (content == null) { -// return false; -// } -// -// return MainDAO.getInstance().getViewsDAO().isFilesByMimeInvalidating(this.getParameters(), content); + return isFilesByMimeInvalidating(this.getParameters(), evt); } } /** * Handles fetching and paging of data for file types by size. */ - public static class FileTypeSizeFetcher extends DAOFetcher { + public class FileTypeSizeFetcher extends DAOFetcher { /** * Main constructor. 
@@ -852,15 +839,7 @@ public class ViewsDAO extends AbstractDAO { @Override public boolean isRefreshRequired(DAOEvent evt) { - return true; - - // GVDTODO -// Content content = DAOEventUtils.getContentFromEvt(evt); -// if (content == null) { -// return false; -// } -// -// return MainDAO.getInstance().getViewsDAO().isFilesBySizeInvalidating(this.getParameters(), content); + return isFilesBySizeInvalidating(this.getParameters(), evt); } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java index 215041601e..a186c0add0 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java @@ -305,9 +305,8 @@ public class ViewsTypeFactory { @Override public boolean isRefreshRequired(PropertyChangeEvent evt) { AbstractFile file = getFileInDataSourceFromEvt(evt, this.dataSourceId); - return file != null && this.childFilters.stream() - .anyMatch((filter) -> MainDAO.getInstance().getViewsDAO().isFilesByExtInvalidating( - new FileTypeExtensionsSearchParams(filter, this.dataSourceId), file)); + return file != null && file.getNameExtension() != null && + this.childFilters.stream().anyMatch((filter) -> filter.getFilter().contains("." 
+ file.getNameExtension().toLowerCase())); } /** From 2d1c86616784c53a585936194a95b762cad76a83 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Thu, 11 Nov 2021 14:42:51 -0500 Subject: [PATCH 034/142] OS account work --- .../datamodel/events/OsAccountEvent.java | 26 +++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/OsAccountEvent.java diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/OsAccountEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/OsAccountEvent.java new file mode 100644 index 0000000000..648f924c53 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/OsAccountEvent.java @@ -0,0 +1,26 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +/** + * An event that OS Accounts were changed. 
+ */ +public class OsAccountEvent implements DAOEvent { + +} From 2fe58bd0deae8ba7265657a29b98a102c23a2205 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Thu, 11 Nov 2021 14:44:25 -0500 Subject: [PATCH 035/142] evt type fixes --- .../sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java | 2 +- .../mainui/datamodel/events/FileTypeExtensionsEvent.java | 7 +++++-- .../autopsy/mainui/datamodel/events/FileTypeMimeEvent.java | 7 +++++-- .../autopsy/mainui/datamodel/events/FileTypeSizeEvent.java | 4 ++++ .../autopsy/mainui/datamodel/events/OsAccountEvent.java | 6 +++++- 5 files changed, 20 insertions(+), 6 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java index 7b8237fd65..19e1ceb5b8 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java @@ -116,7 +116,7 @@ public class OsAccountsDAO extends AbstractDAO { } public boolean isOSAccountInvalidatingEvt(OsAccountsSearchParams searchParams, DAOEvent evt) { - return DAOEvent instanceof OsAccountEvent; + return evt instanceof OsAccountEvent; } /** diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeExtensionsEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeExtensionsEvent.java index 4bc7db27f4..630bdc67d0 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeExtensionsEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeExtensionsEvent.java @@ -69,6 +69,9 @@ public class FileTypeExtensionsEvent implements DAOEvent { } return true; } - - + + @Override + public Type getType() { + return Type.RESULT; + } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeMimeEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeMimeEvent.java index 4aae0476b1..96b884a432 100755 --- 
a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeMimeEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeMimeEvent.java @@ -27,7 +27,7 @@ public class FileTypeMimeEvent implements DAOEvent { private final String mimeType; private final long dataSourceId; - + public FileTypeMimeEvent(String mimeType, long dataSourceId) { this.mimeType = mimeType; this.dataSourceId = dataSourceId; @@ -70,5 +70,8 @@ public class FileTypeMimeEvent implements DAOEvent { return true; } - + @Override + public Type getType() { + return Type.RESULT; + } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeSizeEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeSizeEvent.java index d65aeb2788..71cfc3c1d5 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeSizeEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeSizeEvent.java @@ -71,4 +71,8 @@ public class FileTypeSizeEvent implements DAOEvent { return true; } + @Override + public Type getType() { + return Type.RESULT; + } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/OsAccountEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/OsAccountEvent.java index 648f924c53..50805b16b8 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/OsAccountEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/OsAccountEvent.java @@ -22,5 +22,9 @@ package org.sleuthkit.autopsy.mainui.datamodel.events; * An event that OS Accounts were changed. 
*/ public class OsAccountEvent implements DAOEvent { - + + @Override + public Type getType() { + return Type.RESULT; + } } From 3e31edd2e371476ea505465f0a1996681d1c1f0d Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Thu, 11 Nov 2021 15:47:08 -0500 Subject: [PATCH 036/142] tags events --- .../autopsy/mainui/datamodel/TagsDAO.java | 244 +++++++++++------- .../mainui/datamodel/events/TagsEvent.java | 92 +++++++ 2 files changed, 247 insertions(+), 89 deletions(-) create mode 100755 Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TagsEvent.java diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java index cd95d0b9dd..cdc2d4db7f 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java @@ -21,21 +21,37 @@ package org.sleuthkit.autopsy.mainui.datamodel; import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; +import java.beans.PropertyChangeEvent; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Comparator; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.Stream; import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.tuple.Pair; +import org.apache.commons.lang3.tuple.Triple; import org.openide.util.NbBundle; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; +import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagAddedEvent; +import 
org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagDeletedEvent; +import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent; +import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent; import org.sleuthkit.autopsy.core.UserPreferences; import org.sleuthkit.autopsy.coreutils.TimeZoneUtils; +import org.sleuthkit.autopsy.mainui.datamodel.TagsSearchParams.TagType; +import org.sleuthkit.autopsy.mainui.datamodel.events.TagsEvent; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifactTag; @@ -47,7 +63,7 @@ import org.sleuthkit.datamodel.TskCoreException; /** * Provides information to populate the results viewer for data in the allTags - section. + * section. */ @Messages({"TagsDAO.fileColumns.nameColLbl=Name", "TagsDAO.fileColumns.originalName=Original Name", @@ -71,11 +87,11 @@ public class TagsDAO extends AbstractDAO { private static final int CACHE_SIZE = 5; // rule of thumb: 5 entries times number of cached SearchParams sub-types private static final long CACHE_DURATION = 2; - private static final TimeUnit CACHE_DURATION_UNITS = TimeUnit.MINUTES; - private final Cache, SearchResultsDTO> searchParamsCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).expireAfterAccess(CACHE_DURATION, CACHE_DURATION_UNITS).build(); - + private static final TimeUnit CACHE_DURATION_UNITS = TimeUnit.MINUTES; + private final Cache, SearchResultsDTO> searchParamsCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).expireAfterAccess(CACHE_DURATION, CACHE_DURATION_UNITS).build(); + private static final String USER_NAME_PROPERTY = "user.name"; //NON-NLS - + private static final List FILE_TAG_COLUMNS = Arrays.asList( getFileColumnKey(Bundle.TagsDAO_fileColumns_nameColLbl()), getFileColumnKey(Bundle.TagsDAO_fileColumns_originalName()), // GVDTODO handle translation @@ -110,7 +126,7 @@ public class TagsDAO extends AbstractDAO { private static ColumnKey 
getFileColumnKey(String name) { return new ColumnKey(name, name, Bundle.TagsDAO_fileColumns_noDescription()); } - + public SearchResultsDTO getTags(TagsSearchParams key, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { if (key.getTagName() == null) { throw new IllegalArgumentException("Must have non-null tag name"); @@ -119,17 +135,17 @@ public class TagsDAO extends AbstractDAO { } else if (key.getTagType() == null) { throw new IllegalArgumentException("Must have non-null tag type"); } - + SearchParams searchParams = new SearchParams<>(key, startItem, maxCount); if (hardRefresh) { this.searchParamsCache.invalidate(searchParams); } return searchParamsCache.get(searchParams, () -> fetchTagsDTOs(searchParams)); - } + } @NbBundle.Messages({"FileTag.name.text=File Tag", - "ResultTag.name.text=Result Tag"}) + "ResultTag.name.text=Result Tag"}) private SearchResultsDTO fetchTagsDTOs(SearchParams cacheKey) throws NoCurrentCaseException, TskCoreException { switch (cacheKey.getParamData().getTagType()) { case FILE: @@ -140,7 +156,7 @@ public class TagsDAO extends AbstractDAO { throw new IllegalArgumentException("Unsupported tag type"); } } - + /** * Returns a list of paged tag results. * @@ -165,7 +181,7 @@ public class TagsDAO extends AbstractDAO { Long dataSourceId = cacheKey.getParamData().getDataSourceId(); TagName tagName = cacheKey.getParamData().getTagName(); - + // get all tag results List allTags = new ArrayList<>(); List artifactTags = (dataSourceId != null && dataSourceId > 0) @@ -181,21 +197,21 @@ public class TagsDAO extends AbstractDAO { } else { allTags.addAll(artifactTags); } - + // get current page of tag results List pagedTags = getPaged(allTags, cacheKey); List fileRows = new ArrayList<>(); for (Tag tag : pagedTags) { BlackboardArtifactTag blackboardTag = (BlackboardArtifactTag) tag; - + String name = blackboardTag.getContent().getName(); // As a backup. 
try { name = blackboardTag.getArtifact().getShortDescription(); } catch (TskCoreException ignore) { // it's a WARNING, skip } - + String contentPath; try { contentPath = blackboardTag.getContent().getUniquePath(); @@ -218,12 +234,12 @@ public class TagsDAO extends AbstractDAO { return new BaseSearchResultsDTO(BlackboardArtifactTagsRowDTO.getTypeIdForClass(), Bundle.ResultTag_name_text(), RESULT_TAG_COLUMNS, fileRows, 0, allTags.size()); } - + private SearchResultsDTO fetchFileTags(SearchParams cacheKey) throws NoCurrentCaseException, TskCoreException { Long dataSourceId = cacheKey.getParamData().getDataSourceId(); TagName tagName = cacheKey.getParamData().getTagName(); - + // get all tag results List allTags = new ArrayList<>(); List contentTags = (dataSourceId != null && dataSourceId > 0) @@ -239,10 +255,10 @@ public class TagsDAO extends AbstractDAO { } else { allTags.addAll(contentTags); } - + // get current page of tag results List pagedTags = getPaged(allTags, cacheKey); - + List fileRows = new ArrayList<>(); for (Tag tag : pagedTags) { ContentTag contentTag = (ContentTag) tag; @@ -271,11 +287,129 @@ public class TagsDAO extends AbstractDAO { return new BaseSearchResultsDTO(ContentTagsRowDTO.getTypeIdForClass(), Bundle.FileTag_name_text(), FILE_TAG_COLUMNS, fileRows, 0, allTags.size()); } - + + /** + * Returns true if the DAO event could have an impact on the given search + * params. + * + * @param tagParams The tag params. + * @param daoEvt The DAO event. + * + * @return True if the event could affect the results of the search params. 
+ */ + public boolean isTagsInvalidatingEvent(TagsSearchParams tagParams, DAOEvent daoEvt) { + if (!(daoEvt instanceof TagsEvent)) { + return false; + } + + TagsEvent tagEvt = (TagsEvent) daoEvt; + return (tagParams.getTagName().getId() == tagEvt.getTagNameId() + && tagParams.getTagType().equals(tagEvt.getTagType()) + && (tagParams.getDataSourceId() == null + || tagEvt.getDataSourceId() == null + || tagParams.getDataSourceId() == tagEvt.getDataSourceId())); + } + + @Override + void clearCaches() { + this.searchParamsCache.invalidateAll(); + } + + @Override + List handleAutopsyEvent(Collection evts) { + Map, Set>> mapping = new HashMap<>(); + for (PropertyChangeEvent evt : evts) { + // tag type, tag name id, data source id (or null if unknown) + Triple data = getTagData(evt); + if (data != null) { + mapping.computeIfAbsent(Pair.of(data.getLeft(), data.getMiddle()), k -> new HashSet<>()) + .add(Optional.ofNullable(data.getRight())); + } + } + + ConcurrentMap, SearchResultsDTO> concurrentMap = this.searchParamsCache.asMap(); + concurrentMap.forEach((k, v) -> { + TagsSearchParams paramData = k.getParamData(); + Set> affectedDataSources = mapping.get(Pair.of(paramData.getTagType(), paramData.getTagName().getId())); + // we only clear key if the tag name / type line up and either the parameters data source wasn't specified, + // there is a wild card data source for the event, or the data source is contained in the list of data sources + // affected by the event + if (affectedDataSources != null + && (paramData.getDataSourceId() == null + || affectedDataSources.contains(Optional.empty()) + || affectedDataSources.contains(Optional.of(paramData.getDataSourceId())))) { + concurrentMap.remove(k); + } + }); + + return mapping.entrySet().stream() + .flatMap(entry -> { + TagType tagType = entry.getKey().getLeft(); + Long tagNameId = entry.getKey().getRight(); + + return entry.getValue().stream() + .map((dsIdOpt) -> new TagsEvent(tagType, tagNameId, dsIdOpt.orElse(null))); + }) + 
.collect(Collectors.toList()); + } + + /** + * Returns tag information from an event or null if no tag information + * found. + * + * @param evt The autopsy event. + * + * @return tag type, tag name id, data source id (or null if none determined + * from event). + */ + private Triple getTagData(PropertyChangeEvent evt) { + + String eventType = evt.getPropertyName(); + + if (eventType.equals(Case.Events.BLACKBOARD_ARTIFACT_TAG_ADDED.toString()) + || eventType.equals(Case.Events.BLACKBOARD_ARTIFACT_TAG_DELETED.toString()) + || eventType.equals(Case.Events.CONTENT_TAG_ADDED.toString()) + || eventType.equals(Case.Events.CONTENT_TAG_DELETED.toString())) { + + if (evt instanceof BlackBoardArtifactTagAddedEvent) { + BlackBoardArtifactTagAddedEvent event = (BlackBoardArtifactTagAddedEvent) evt; + // ensure tag added event has a valid content id + if (event.getAddedTag() != null + && event.getAddedTag().getContent() != null + && event.getAddedTag().getArtifact() != null) { + return Triple.of(TagType.RESULT, event.getAddedTag().getName().getId(), event.getAddedTag().getArtifact().getDataSourceObjectID()); + } + + } else if (evt instanceof BlackBoardArtifactTagDeletedEvent) { + BlackBoardArtifactTagDeletedEvent event = (BlackBoardArtifactTagDeletedEvent) evt; + BlackBoardArtifactTagDeletedEvent.DeletedBlackboardArtifactTagInfo deletedTagInfo = event.getDeletedTagInfo(); + if (deletedTagInfo != null) { + return Triple.of(TagType.RESULT, deletedTagInfo.getName().getId(), null); + } + } else if (evt instanceof ContentTagAddedEvent) { + ContentTagAddedEvent event = (ContentTagAddedEvent) evt; + // ensure tag added event has a valid content id + if (event.getAddedTag() != null && event.getAddedTag().getContent() != null) { + Content content = event.getAddedTag().getContent(); + Long dsId = content instanceof AbstractFile ? 
((AbstractFile) content).getDataSourceObjectId() : null; + return Triple.of(TagType.FILE, event.getAddedTag().getName().getId(), dsId); + } + } else if (evt instanceof ContentTagDeletedEvent) { + ContentTagDeletedEvent event = (ContentTagDeletedEvent) evt; + // ensure tag deleted event has a valid content id + ContentTagDeletedEvent.DeletedContentTagInfo deletedTagInfo = event.getDeletedTagInfo(); + if (deletedTagInfo != null) { + return Triple.of(TagType.FILE, deletedTagInfo.getName().getId(), null); + } + } + } + return null; + } + /** * Handles fetching and paging of data for allTags. */ - public static class TagFetcher extends DAOFetcher { + public class TagFetcher extends DAOFetcher { /** * Main constructor. @@ -293,75 +427,7 @@ public class TagsDAO extends AbstractDAO { @Override public boolean isRefreshRequired(DAOEvent evt) { - return true; - - // GVDTODO - -// TagsSearchParams params = this.getParameters(); -// String eventType = evt.getPropertyName(); -// -// // handle artifact/result tag changes -// if (eventType.equals(Case.Events.BLACKBOARD_ARTIFACT_TAG_ADDED.toString()) -// || eventType.equals(Case.Events.BLACKBOARD_ARTIFACT_TAG_DELETED.toString())) { -// -// // ignore non-artifact/result tag changes -// if (params.getTagType() != TagsSearchParams.TagType.RESULT) { -// return false; -// } -// -// if (evt instanceof AutopsyEvent) { -// if (evt instanceof BlackBoardArtifactTagAddedEvent) { -// // An artifact associated with the current case has been tagged. 
-// BlackBoardArtifactTagAddedEvent event = (BlackBoardArtifactTagAddedEvent) evt; -// // ensure tag added event has a valid content id -// if (event.getAddedTag() == null || event.getAddedTag().getContent() == null || event.getAddedTag().getArtifact() == null) { -// return false; -// } -// return params.getTagName().getId() == event.getAddedTag().getId(); -// } else if (evt instanceof BlackBoardArtifactTagDeletedEvent) { -// // A tag has been removed from an artifact associated with the current case. -// BlackBoardArtifactTagDeletedEvent event = (BlackBoardArtifactTagDeletedEvent) evt; -// // ensure tag deleted event has a valid content id -// BlackBoardArtifactTagDeletedEvent.DeletedBlackboardArtifactTagInfo deletedTagInfo = event.getDeletedTagInfo(); -// if (deletedTagInfo == null) { -// return false; -// } -// return params.getTagName().getId() == deletedTagInfo.getTagID(); -// } -// } -// } -// -// // handle file/content tag changes -// if (eventType.equals(Case.Events.CONTENT_TAG_ADDED.toString()) -// || eventType.equals(Case.Events.CONTENT_TAG_DELETED.toString())) { -// -// // ignore non-file/content tag changes -// if (params.getTagType() != TagsSearchParams.TagType.FILE) { -// return false; -// } -// -// if (evt instanceof AutopsyEvent) { -// if (evt instanceof ContentTagAddedEvent) { -// // Content associated with the current case has been tagged. -// ContentTagAddedEvent event = (ContentTagAddedEvent) evt; -// // ensure tag added event has a valid content id -// if (event.getAddedTag() == null || event.getAddedTag().getContent() == null) { -// return false; -// } -// return params.getTagName().getId() == event.getAddedTag().getId(); -// } else if (evt instanceof ContentTagDeletedEvent) { -// // A tag has been removed from content associated with the current case. 
-// ContentTagDeletedEvent event = (ContentTagDeletedEvent) evt; -// // ensure tag deleted event has a valid content id -// ContentTagDeletedEvent.DeletedContentTagInfo deletedTagInfo = event.getDeletedTagInfo(); -// if (deletedTagInfo == null) { -// return false; -// } -// return params.getTagName().getId() == deletedTagInfo.getTagID(); -// } -// } -// } -// return false; + return isTagsInvalidatingEvent(this.getParameters(), evt); } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TagsEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TagsEvent.java new file mode 100755 index 0000000000..12cfc26d64 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TagsEvent.java @@ -0,0 +1,92 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +import java.util.Objects; +import org.sleuthkit.autopsy.mainui.datamodel.TagsSearchParams.TagType; + +/** + * An event affecting tags + */ +public class TagsEvent implements DAOEvent { + + private final TagType type; + private final Long tagNameId; + private final Long dataSourceId; + + public TagsEvent(TagType type, Long tagNameId, Long dataSourceId) { + this.type = type; + this.tagNameId = tagNameId; + this.dataSourceId = dataSourceId; + } + + public TagType getTagType() { + return type; + } + + public Long getTagNameId() { + return tagNameId; + } + + /** + * @return The data source object id for the tag. Is null if cannot be + * determined. + */ + public Long getDataSourceId() { + return dataSourceId; + } + + @Override + public int hashCode() { + int hash = 7; + hash = 97 * hash + Objects.hashCode(this.type); + hash = 97 * hash + Objects.hashCode(this.tagNameId); + hash = 97 * hash + Objects.hashCode(this.dataSourceId); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final TagsEvent other = (TagsEvent) obj; + if (this.type != other.type) { + return false; + } + if (!Objects.equals(this.tagNameId, other.tagNameId)) { + return false; + } + if (!Objects.equals(this.dataSourceId, other.dataSourceId)) { + return false; + } + return true; + } + + @Override + public Type getType() { + return Type.RESULT; + } +} From c66053c879224636cd781e67b42407bddee26f42 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Thu, 11 Nov 2021 16:52:33 -0500 Subject: [PATCH 037/142] initial file system DAO work --- .../mainui/datamodel/FileSystemDAO.java | 103 +++++++++++++----- 1 file changed, 78 insertions(+), 25 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java 
b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java index c31f468565..787a0dc5e4 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java @@ -20,16 +20,24 @@ package org.sleuthkit.autopsy.mainui.datamodel; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; +import com.google.common.collect.ImmutableSet; +import java.beans.PropertyChangeEvent; import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; import java.util.Comparator; +import java.util.HashSet; import java.util.List; import java.util.Optional; +import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.Stream; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventUtils; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.Host; import org.sleuthkit.datamodel.Person; @@ -40,13 +48,14 @@ import org.sleuthkit.datamodel.TskCoreException; * */ public class FileSystemDAO extends AbstractDAO { + private static final int CACHE_SIZE = 15; // rule of thumb: 5 entries times number of cached SearchParams sub-types private static final long CACHE_DURATION = 2; private static final TimeUnit CACHE_DURATION_UNITS = TimeUnit.MINUTES; private final Cache, BaseSearchResultsDTO> searchParamsCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).expireAfterAccess(CACHE_DURATION, CACHE_DURATION_UNITS).build(); private static final String FILE_SYSTEM_TYPE_ID = "FILE_SYSTEM"; - + private static FileSystemDAO instance = null; synchronized static FileSystemDAO getInstance() { @@ -55,7 +64,7 @@ public class FileSystemDAO extends AbstractDAO { } return 
instance; } - + private BaseSearchResultsDTO fetchContentForTableFromContent(SearchParams cacheKey) throws NoCurrentCaseException, TskCoreException { SleuthkitCase skCase = Case.getCurrentCaseThrows().getSleuthkitCase(); @@ -67,15 +76,15 @@ public class FileSystemDAO extends AbstractDAO { if (parentContent == null) { throw new TskCoreException("Error loading children of object with ID " + objectId); } - + parentName = parentContent.getName(); for (Content content : parentContent.getChildren()) { contentForTable.addAll(FileSystemColumnUtils.getNextDisplayableContent(content)); - } + } return fetchContentForTable(cacheKey, contentForTable, parentName); } - + private BaseSearchResultsDTO fetchContentForTableFromHost(SearchParams cacheKey) throws NoCurrentCaseException, TskCoreException { SleuthkitCase skCase = Case.getCurrentCaseThrows().getSleuthkitCase(); @@ -91,8 +100,8 @@ public class FileSystemDAO extends AbstractDAO { throw new TskCoreException("Error loading host with ID " + objectId); } return fetchContentForTable(cacheKey, contentForTable, parentName); - } - + } + private BaseSearchResultsDTO fetchHostsForTable(SearchParams cacheKey) throws NoCurrentCaseException, TskCoreException { SleuthkitCase skCase = Case.getCurrentCaseThrows().getSleuthkitCase(); @@ -100,7 +109,7 @@ public class FileSystemDAO extends AbstractDAO { Long objectId = cacheKey.getParamData().getPersonObjectId(); List hostsForTable = new ArrayList<>(); String parentName = ""; - + if (objectId != null) { Optional person = skCase.getPersonManager().getPerson(objectId); if (person.isPresent()) { @@ -112,10 +121,10 @@ public class FileSystemDAO extends AbstractDAO { } else { hostsForTable.addAll(skCase.getPersonManager().getHostsWithoutPersons()); } - + Stream pagedHostsStream = hostsForTable.stream() - .sorted(Comparator.comparing((host) -> host.getHostId())) - .skip(cacheKey.getStartItem()); + .sorted(Comparator.comparing((host) -> host.getHostId())) + .skip(cacheKey.getStartItem()); if 
(cacheKey.getMaxResultsCount() != null) { pagedHostsStream = pagedHostsStream.limit(cacheKey.getMaxResultsCount()); @@ -123,37 +132,36 @@ public class FileSystemDAO extends AbstractDAO { List pagedHosts = pagedHostsStream.collect(Collectors.toList()); List columnKeys = FileSystemColumnUtils.getColumnKeysForHost(); - + List rows = new ArrayList<>(); for (Host host : pagedHosts) { List cellValues = FileSystemColumnUtils.getCellValuesForHost(host); rows.add(new BaseRowDTO(cellValues, FILE_SYSTEM_TYPE_ID, host.getHostId())); } return new BaseSearchResultsDTO(FILE_SYSTEM_TYPE_ID, parentName, columnKeys, rows, cacheKey.getStartItem(), hostsForTable.size()); - } - - + } + private BaseSearchResultsDTO fetchContentForTable(SearchParams cacheKey, List contentForTable, String parentName) throws NoCurrentCaseException, TskCoreException { // Ensure consistent columns for each page by doing this before paging List displayableTypes = FileSystemColumnUtils.getDisplayableTypesForContentList(contentForTable); - + List pagedContent = getPaged(contentForTable, cacheKey); List columnKeys = FileSystemColumnUtils.getColumnKeysForContent(displayableTypes); - + List rows = new ArrayList<>(); for (Content content : pagedContent) { List cellValues = FileSystemColumnUtils.getCellValuesForContent(content, displayableTypes); rows.add(new BaseRowDTO(cellValues, FILE_SYSTEM_TYPE_ID, content.getId())); } return new BaseSearchResultsDTO(FILE_SYSTEM_TYPE_ID, parentName, columnKeys, rows, cacheKey.getStartItem(), contentForTable.size()); - } - + } + /** * Returns a list of paged content. * - * @param contentObjects The content objects. - * @param searchParams The search parameters including the paging. + * @param contentObjects The content objects. + * @param searchParams The search parameters including the paging. * * @return The list of paged content. 
*/ @@ -167,8 +175,8 @@ public class FileSystemDAO extends AbstractDAO { } return pagedArtsStream.collect(Collectors.toList()); - } - + } + public BaseSearchResultsDTO getContentForTable(FileSystemContentSearchParam objectKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { SearchParams searchParams = new SearchParams<>(objectKey, startItem, maxCount); @@ -178,7 +186,7 @@ public class FileSystemDAO extends AbstractDAO { return searchParamsCache.get(searchParams, () -> fetchContentForTableFromContent(searchParams)); } - + public BaseSearchResultsDTO getContentForTable(FileSystemHostSearchParam objectKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { SearchParams searchParams = new SearchParams<>(objectKey, startItem, maxCount); @@ -188,7 +196,7 @@ public class FileSystemDAO extends AbstractDAO { return searchParamsCache.get(searchParams, () -> fetchContentForTableFromHost(searchParams)); } - + public BaseSearchResultsDTO getHostsForTable(FileSystemPersonSearchParam objectKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { SearchParams searchParams = new SearchParams<>(objectKey, startItem, maxCount); @@ -198,4 +206,49 @@ public class FileSystemDAO extends AbstractDAO { return searchParamsCache.get(searchParams, () -> fetchHostsForTable(searchParams)); } + + @Override + void clearCaches() { + this.searchParamsCache.invalidateAll(); + } + + private static final Set DATA_SOURCE_EVTS = ImmutableSet.of( + Case.Events.DATA_SOURCE_ADDED.toString(), + Case.Events.DATA_SOURCE_DELETED.toString(), + Case.Events.DATA_SOURCE_NAME_CHANGED.toString() + ); + + private static final Set HOST_EVTS = ImmutableSet.of( + Case.Events.HOSTS_ADDED.toString(), + Case.Events.HOSTS_ADDED_TO_PERSON.toString(), + Case.Events.HOSTS_DELETED.toString(), + Case.Events.HOSTS_REMOVED_FROM_PERSON.toString(), + 
Case.Events.HOSTS_UPDATED.toString() + ); + + @Override + List handleAutopsyEvent(Collection evts) { +// Set affectedPersons = new HashSet<>(); +// Set affectedHosts = new HashSet<>(); +// Set affectedParentContent = new HashSet<>(); +// +// for (PropertyChangeEvent evt : evts) { +// Content content = DAOEventUtils.getContentFromEvt(evt); +// if (content != null) { +// affectedParentContent.add(content.getParentId()); +// continue; +// } +// +// String propName = evt.getPropertyName(); +// if (DATA_SOURCE_EVTS.contains(propName)) { +// affectedHosts.add(evt.getHostId()); +// } else if (HOST_EVTS.contains(propName)) { +// affectedPersons.add(evt.getPersonId()); +// } +// } + + // GVDTODO clear affected cache entries + // GVDTODO generate events + return Collections.emptyList(); + } } From e4da6ad546a610260b9452d8033a31ad1287b309 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Fri, 12 Nov 2021 09:04:14 -0500 Subject: [PATCH 038/142] fix --- .../corecomponents/DataResultPanel.java | 11 ++-- .../mainui/datamodel/AnalysisResultDAO.java | 23 ++++--- .../mainui/datamodel/DataArtifactDAO.java | 10 +-- .../mainui/datamodel/FileSystemDAO.java | 61 ++++++++++--------- .../mainui/datamodel/OsAccountsDAO.java | 9 ++- .../autopsy/mainui/datamodel/TagsDAO.java | 9 ++- .../autopsy/mainui/datamodel/ViewsDAO.java | 25 +++++--- 7 files changed, 87 insertions(+), 61 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java index 88f0d81d10..9d742d0957 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java @@ -1154,8 +1154,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C */ void displayDataArtifact(DataArtifactSearchParam dataArtifactParams) { try { - DataArtifactDAO dataArtDAO = MainDAO.getInstance().getDataArtifactsDAO(); - 
this.searchResultManager = new SearchManager(dataArtDAO.new DataArtifactFetcher(dataArtifactParams), getPageSize()); + this.searchResultManager = new SearchManager(new DataArtifactFetcher(dataArtifactParams), getPageSize()); SearchResultsDTO results = searchResultManager.getResults(); displaySearchResults(results, true); } catch (ExecutionException ex) { @@ -1169,7 +1168,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C void displayAnalysisResult(AnalysisResultSearchParam analysisResultParams) { try { - this.searchResultManager = new SearchManager(MainDAO.getInstance().getAnalysisResultDAO().new AnalysisResultFetcher(analysisResultParams), getPageSize()); + this.searchResultManager = new SearchManager(new AnalysisResultFetcher(analysisResultParams), getPageSize()); SearchResultsDTO results = searchResultManager.getResults(); displaySearchResults(results, true); } catch (ExecutionException ex) { @@ -1190,7 +1189,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C void displayFileExtensions(FileTypeExtensionsSearchParams fileExtensionsParams) { try { - this.searchResultManager = new SearchManager(MainDAO.getInstance().getViewsDAO().new FileTypeExtFetcher(fileExtensionsParams), getPageSize()); + this.searchResultManager = new SearchManager(new FileTypeExtFetcher(fileExtensionsParams), getPageSize()); SearchResultsDTO results = searchResultManager.getResults(); displaySearchResults(results, true); } catch (ExecutionException ex) { @@ -1211,7 +1210,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C void displayFileMimes(FileTypeMimeSearchParams fileMimeKey) { try { - this.searchResultManager = new SearchManager(MainDAO.getInstance().getViewsDAO().new FileTypeMimeFetcher(fileMimeKey), getPageSize()); + this.searchResultManager = new SearchManager(new FileTypeMimeFetcher(fileMimeKey), getPageSize()); SearchResultsDTO results = searchResultManager.getResults(); 
displaySearchResults(results, true); } catch (ExecutionException | IllegalArgumentException ex) { @@ -1270,7 +1269,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C */ void displayAnalysisResultSet(AnalysisResultSetSearchParam setKey) { try { - this.searchResultManager = new SearchManager(MainDAO.getInstance().getAnalysisResultDAO().new AnalysisResultSetFetcher(setKey), getPageSize()); + this.searchResultManager = new SearchManager(new AnalysisResultSetFetcher(setKey), getPageSize()); SearchResultsDTO results = searchResultManager.getResults(); displaySearchResults(results, true); } catch (ExecutionException | IllegalArgumentException ex) { diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java index 3468ff1544..9050e45385 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java @@ -766,8 +766,9 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { /** * Handles fetching and paging of analysis results. */ - public class AnalysisResultFetcher extends DAOFetcher { - + public static class AnalysisResultFetcher extends DAOFetcher { + private final AnalysisResultDAO dao; + /** * Main constructor. 
* @@ -775,23 +776,25 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { */ public AnalysisResultFetcher(AnalysisResultSearchParam params) { super(params); + this.dao = MainDAO.getInstance().getAnalysisResultDAO(); } @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return MainDAO.getInstance().getAnalysisResultDAO().getAnalysisResultsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + return dao.getAnalysisResultsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); } @Override public boolean isRefreshRequired(DAOEvent evt) { - return AnalysisResultDAO.this.isAnalysisResultsInvalidating(this.getParameters(), evt); + return dao.isAnalysisResultsInvalidating(this.getParameters(), evt); } } /** * Handles fetching and paging of hashset hits. */ - public class AnalysisResultSetFetcher extends DAOFetcher { + public static class AnalysisResultSetFetcher extends DAOFetcher { + private final AnalysisResultDAO dao; /** * Main constructor. 
@@ -800,16 +803,17 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { */ public AnalysisResultSetFetcher(AnalysisResultSetSearchParam params) { super(params); + this.dao = MainDAO.getInstance().getAnalysisResultDAO(); } @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return MainDAO.getInstance().getAnalysisResultDAO().getAnalysisResultSetHits(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + return dao.getAnalysisResultSetHits(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); } @Override public boolean isRefreshRequired(DAOEvent evt) { - return AnalysisResultDAO.this.isAnalysisResultsSetInvalidating(this.getParameters(), evt); + return dao.isAnalysisResultsSetInvalidating(this.getParameters(), evt); } } @@ -818,6 +822,8 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { */ public static class KeywordHitResultFetcher extends DAOFetcher { + private final AnalysisResultDAO dao; + /** * Main constructor. 
* @@ -825,11 +831,12 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { */ public KeywordHitResultFetcher(KeywordHitSearchParam params) { super(params); + this.dao = MainDAO.getInstance().getAnalysisResultDAO(); } @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return MainDAO.getInstance().getAnalysisResultDAO().getKeywordHitsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + return dao.getKeywordHitsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); } @Override diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index 899e434a78..e374c0e929 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -220,8 +220,9 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { /* * Handles fetching and paging of data artifacts. */ - public class DataArtifactFetcher extends DAOFetcher { - + public static class DataArtifactFetcher extends DAOFetcher { + private final DataArtifactDAO dao; + /** * Main constructor. 
* @@ -229,16 +230,17 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { */ public DataArtifactFetcher(DataArtifactSearchParam params) { super(params); + this.dao = MainDAO.getInstance().getDataArtifactsDAO(); } @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return MainDAO.getInstance().getDataArtifactsDAO().getDataArtifactsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + return dao.getDataArtifactsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); } @Override public boolean isRefreshRequired(DAOEvent evt) { - return DataArtifactDAO.this.isDataArtifactInvalidating(this.getParameters(), evt); + return dao.isDataArtifactInvalidating(this.getParameters(), evt); } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java index b6bd53ef03..ac46570b5d 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java @@ -87,27 +87,25 @@ public class FileSystemDAO extends AbstractDAO { } return instance; } - - // public boolean isSystemContentInvalidating(FileSystemContentSearchParam key, Content eventContent) { - // if(!(eventContent instanceof Content)) { - // return false; - // } - - // try { - // return key.getContentObjectId() != eventContent.getParent().getId(); - // } catch (TskCoreException ex) { - // // There is nothing we can do with the exception. 
- // return false; - // } - // } - - // public boolean isSystemHostInvalidating(FileSystemHostSearchParam key, Host eventHost) { - // if(!(eventHost instanceof Host)) { - // return false; - // } - - // return key.getHostObjectId() != eventHost.getHostId(); - // } + + public boolean isSystemContentInvalidating(FileSystemContentSearchParam key, Content eventContent) { + if (!(eventContent instanceof Content)) { + return false; + } + try { + return key.getContentObjectId() != eventContent.getParent().getId(); + } catch (TskCoreException ex) { + // There is nothing we can do with the exception. + return false; + } + } + + public boolean isSystemHostInvalidating(FileSystemHostSearchParam key, Host eventHost) { + if (!(eventHost instanceof Host)) { + return false; + } + return key.getHostObjectId() != eventHost.getHostId(); + } private BaseSearchResultsDTO fetchContentForTableFromContent(SearchParams cacheKey) throws NoCurrentCaseException, TskCoreException { @@ -310,10 +308,10 @@ public class FileSystemDAO extends AbstractDAO { ); private static final Set HOST_EVTS = ImmutableSet.of( - Case.Events.HOSTS_ADDED.toString(), - Case.Events.HOSTS_ADDED_TO_PERSON.toString(), - Case.Events.HOSTS_DELETED.toString(), - Case.Events.HOSTS_REMOVED_FROM_PERSON.toString(), + Case.Events.HOSTS_ADDED.toString(), + Case.Events.HOSTS_ADDED_TO_PERSON.toString(), + Case.Events.HOSTS_DELETED.toString(), + Case.Events.HOSTS_REMOVED_FROM_PERSON.toString(), Case.Events.HOSTS_UPDATED.toString() ); @@ -337,15 +335,19 @@ public class FileSystemDAO extends AbstractDAO { // affectedPersons.add(evt.getPersonId()); // } // } - + // GVDTODO clear affected cache entries // GVDTODO generate events return Collections.emptyList(); + } + /** * Handles fetching and paging of data for file types by mime type. */ public static class FileSystemFetcher extends DAOFetcher { + private final FileSystemDAO dao; + /** * Main constructor. 
* @@ -353,16 +355,17 @@ public class FileSystemDAO extends AbstractDAO { */ public FileSystemFetcher(FileSystemContentSearchParam params) { super(params); + this.dao = MainDAO.getInstance().getFileSystemDAO(); } @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return MainDAO.getInstance().getFileSystemDAO().getContentForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + return this.dao.getContentForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); } @Override - public boolean isRefreshRequired(PropertyChangeEvent evt) { - Content content = getContentFromEvt(evt); + public boolean isRefreshRequired(DAOEvent evt) { + return this.dao.Content content = getContentFromEvt(evt); if (content == null) { return false; } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java index 19e1ceb5b8..d8570472a9 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java @@ -203,7 +203,9 @@ public class OsAccountsDAO extends AbstractDAO { /** * Handles fetching and paging of data for accounts. */ - public class AccountFetcher extends DAOFetcher { + public static class AccountFetcher extends DAOFetcher { + + private final OsAccountsDAO dao; /** * Main constructor. 
@@ -212,16 +214,17 @@ public class OsAccountsDAO extends AbstractDAO { */ public AccountFetcher(OsAccountsSearchParams params) { super(params); + this.dao = MainDAO.getInstance().getOsAccountsDAO(); } @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return MainDAO.getInstance().getOsAccountsDAO().getAccounts(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + return dao.getAccounts(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); } @Override public boolean isRefreshRequired(DAOEvent evt) { - return isOSAccountInvalidatingEvt(this.getParameters(), evt); + return dao.isOSAccountInvalidatingEvt(this.getParameters(), evt); } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java index cdc2d4db7f..192fca67d5 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java @@ -409,8 +409,10 @@ public class TagsDAO extends AbstractDAO { /** * Handles fetching and paging of data for allTags. */ - public class TagFetcher extends DAOFetcher { + public static class TagFetcher extends DAOFetcher { + private final TagsDAO dao; + /** * Main constructor. 
* @@ -418,16 +420,17 @@ public class TagsDAO extends AbstractDAO { */ public TagFetcher(TagsSearchParams params) { super(params); + this.dao = MainDAO.getInstance().getTagsDAO(); } @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return MainDAO.getInstance().getTagsDAO().getTags(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + return dao.getTags(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); } @Override public boolean isRefreshRequired(DAOEvent evt) { - return isTagsInvalidatingEvent(this.getParameters(), evt); + return dao.isTagsInvalidatingEvent(this.getParameters(), evt); } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java index bd1c5bbcc7..dd2f73a97e 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java @@ -771,7 +771,9 @@ public class ViewsDAO extends AbstractDAO { /** * Handles fetching and paging of data for file types by extension. */ - public class FileTypeExtFetcher extends DAOFetcher { + public static class FileTypeExtFetcher extends DAOFetcher { + + private final ViewsDAO dao; /** * Main constructor. 
@@ -780,23 +782,26 @@ public class ViewsDAO extends AbstractDAO { */ public FileTypeExtFetcher(FileTypeExtensionsSearchParams params) { super(params); + this.dao = MainDAO.getInstance().getViewsDAO(); } @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return MainDAO.getInstance().getViewsDAO().getFilesByExtension(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + return this.dao.getFilesByExtension(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); } @Override public boolean isRefreshRequired(DAOEvent evt) { - return isFilesByExtInvalidating(this.getParameters(), evt); + return this.dao.isFilesByExtInvalidating(this.getParameters(), evt); } } /** * Handles fetching and paging of data for file types by mime type. */ - public class FileTypeMimeFetcher extends DAOFetcher { + public static class FileTypeMimeFetcher extends DAOFetcher { + + private final ViewsDAO dao; /** * Main constructor. @@ -805,16 +810,17 @@ public class ViewsDAO extends AbstractDAO { */ public FileTypeMimeFetcher(FileTypeMimeSearchParams params) { super(params); + this.dao = MainDAO.getInstance().getViewsDAO(); } @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return MainDAO.getInstance().getViewsDAO().getFilesByMime(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + return this.dao.getFilesByMime(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); } @Override public boolean isRefreshRequired(DAOEvent evt) { - return isFilesByMimeInvalidating(this.getParameters(), evt); + return this.dao.isFilesByMimeInvalidating(this.getParameters(), evt); } } @@ -823,6 +829,8 @@ public class ViewsDAO extends AbstractDAO { */ public class FileTypeSizeFetcher extends DAOFetcher { + private final ViewsDAO dao; + /** * Main constructor. 
* @@ -830,16 +838,17 @@ public class ViewsDAO extends AbstractDAO { */ public FileTypeSizeFetcher(FileTypeSizeSearchParams params) { super(params); + this.dao = MainDAO.getInstance().getViewsDAO(); } @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return MainDAO.getInstance().getViewsDAO().getFilesBySize(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + return this.dao.getFilesBySize(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); } @Override public boolean isRefreshRequired(DAOEvent evt) { - return isFilesBySizeInvalidating(this.getParameters(), evt); + return this.dao.isFilesBySizeInvalidating(this.getParameters(), evt); } } } From 07094de3552ec8b90b75415a7d603a210dfdc4cb Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Fri, 12 Nov 2021 09:11:16 -0500 Subject: [PATCH 039/142] fix in data fetcher --- .../autopsy/corecomponents/DataResultPanel.java | 3 +-- .../autopsy/mainui/datamodel/DataArtifactDAO.java | 10 ++++++---- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java index e47dadde1f..a0fb40b3d7 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java @@ -1152,8 +1152,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C */ void displayDataArtifact(DataArtifactSearchParam dataArtifactParams) { try { - DataArtifactDAO dataArtDAO = MainDAO.getInstance().getDataArtifactsDAO(); - this.searchResultManager = new SearchManager(dataArtDAO.new DataArtifactFetcher(dataArtifactParams), getPageSize()); + this.searchResultManager = new SearchManager(new DataArtifactFetcher(dataArtifactParams), getPageSize()); SearchResultsDTO results = searchResultManager.getResults(); 
displaySearchResults(results, true); } catch (ExecutionException ex) { diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index 899e434a78..e374c0e929 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -220,8 +220,9 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { /* * Handles fetching and paging of data artifacts. */ - public class DataArtifactFetcher extends DAOFetcher { - + public static class DataArtifactFetcher extends DAOFetcher { + private final DataArtifactDAO dao; + /** * Main constructor. * @@ -229,16 +230,17 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { */ public DataArtifactFetcher(DataArtifactSearchParam params) { super(params); + this.dao = MainDAO.getInstance().getDataArtifactsDAO(); } @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return MainDAO.getInstance().getDataArtifactsDAO().getDataArtifactsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + return dao.getDataArtifactsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); } @Override public boolean isRefreshRequired(DAOEvent evt) { - return DataArtifactDAO.this.isDataArtifactInvalidating(this.getParameters(), evt); + return dao.isDataArtifactInvalidating(this.getParameters(), evt); } } } From d6bfa4f9674004e2559f13559c6385c22da33885 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Fri, 12 Nov 2021 14:03:12 -0500 Subject: [PATCH 040/142] working through events --- .../mainui/datamodel/AnalysisResultDAO.java | 62 +++++- .../mainui/datamodel/DataArtifactDAO.java | 6 + .../mainui/datamodel/FileSystemDAO.java | 179 ++++++++++++------ .../mainui/datamodel/OsAccountsDAO.java | 2 + 
.../autopsy/mainui/datamodel/TagsDAO.java | 6 + .../autopsy/mainui/datamodel/ViewsDAO.java | 128 +++++++++---- .../events/FileSystemContentEvent.java | 66 +++++++ .../datamodel/events/FileSystemHostEvent.java | 67 +++++++ .../events/FileSystemPersonEvent.java | 72 +++++++ 9 files changed, 488 insertions(+), 100 deletions(-) create mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemContentEvent.java create mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemHostEvent.java create mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemPersonEvent.java diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java index 9050e45385..0589b180e1 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java @@ -28,6 +28,7 @@ import java.sql.SQLException; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; @@ -35,6 +36,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutionException; import java.util.logging.Level; import java.util.stream.Collectors; @@ -728,7 +730,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { for (BlackboardArtifact art : dataEvt.getArtifacts()) { try { if (art.getArtifactTypeID() == BlackboardArtifact.Type.TSK_KEYWORD_HIT.getTypeID()) { - // GVDTODO + // GVDTODO handle keyword hits } else if (art.getArtifactTypeID() == BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT.getTypeID() || art.getArtifactTypeID() == BlackboardArtifact.Type.TSK_INTERESTING_ARTIFACT_HIT.getTypeID() || art.getArtifactTypeID() == 
BlackboardArtifact.Type.TSK_HASHSET_HIT.getTypeID()) { @@ -749,6 +751,27 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } } + // don't continue if no relevant items found + if (analysisResultMap.isEmpty() && setMap.isEmpty() && keywordHitsMap.isEmpty()) { + return Collections.emptyList(); + } + + clearRelevantCacheEntries(analysisResultMap, setMap); + + return getDAOEvents(analysisResultMap, setMap); + } + + /** + * Generate DAO events from digest of autopsy events. + * + * @param analysisResultMap A mapping of analysis result type ids to data + * sources where artifacts were created. + * @param setMap A mapping of (artifact type id, set name) to + * data sources where artifacts were created. + * + * @return The list of dao events. + */ + private List getDAOEvents(Map> analysisResultMap, Map, Set> setMap) { // invalidate cache entries that are affected by events // GVDTODO handle concurrency issues that may arise Stream analysisResultEvts = analysisResultMap.entrySet().stream() @@ -763,12 +786,44 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { .collect(Collectors.toList()); } + /** + * Clears cache entries given the provided digests of autopsy events. + * + * @param analysisResultMap A mapping of analysis result type ids to data + * sources where artifacts were created. + * @param setMap A mapping of (artifact type id, set name) to + * data sources where artifacts were created. 
+ */ + private void clearRelevantCacheEntries(Map> analysisResultMap, Map, Set> setMap) { + ConcurrentMap, AnalysisResultTableSearchResultsDTO> arConcurrentMap = this.analysisResultCache.asMap(); + arConcurrentMap.forEach((k, v) -> { + BlackboardArtifactSearchParam searchParam = k.getParamData(); + Set dsIds = analysisResultMap.get(searchParam.getArtifactType().getTypeID()); + if (dsIds != null && (searchParam.getDataSourceId() == null || dsIds.contains(searchParam.getDataSourceId()))) { + arConcurrentMap.remove(k); + } + }); + + ConcurrentMap, AnalysisResultTableSearchResultsDTO> setConcurrentMap = this.setHitCache.asMap(); + setConcurrentMap.forEach((k, v) -> { + AnalysisResultSetSearchParam searchParam = k.getParamData(); + Set dsIds = setMap.get(Pair.of(searchParam.getArtifactType().getTypeID(), searchParam.getSetName())); + if (dsIds != null && (searchParam.getDataSourceId() == null || dsIds.contains(searchParam.getDataSourceId()))) { + arConcurrentMap.remove(k); + } + }); + + // GVDTODO handle clearing cache for keyword search hits + // private final Cache, AnalysisResultTableSearchResultsDTO> keywordHitCache = CacheBuilder.newBuilder().maximumSize(1000).build(); + } + /** * Handles fetching and paging of analysis results. */ public static class AnalysisResultFetcher extends DAOFetcher { + private final AnalysisResultDAO dao; - + /** * Main constructor. * @@ -794,6 +849,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { * Handles fetching and paging of hashset hits. */ public static class AnalysisResultSetFetcher extends DAOFetcher { + private final AnalysisResultDAO dao; /** @@ -823,7 +879,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { public static class KeywordHitResultFetcher extends DAOFetcher { private final AnalysisResultDAO dao; - + /** * Main constructor. 
* diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index e374c0e929..34edc5d408 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -27,6 +27,7 @@ import java.beans.PropertyChangeEvent; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; @@ -192,6 +193,11 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { logger.log(Level.WARNING, "Unable to fetch artifact category for artifact with id: " + art.getId(), ex); } }); + + // don't do anything else if no relevant events + if (artifactTypeDataSourceMap.isEmpty()) { + return Collections.emptyList(); + } // invalidate cache entries that are affected by events ConcurrentMap, DataArtifactTableSearchResultsDTO> concurrentMap = this.dataArtifactCache.asMap(); diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java index ac46570b5d..056286e714 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java @@ -30,15 +30,27 @@ import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.Set; +import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; +import java.util.logging.Level; import java.util.stream.Collectors; import java.util.stream.Stream; +import org.apache.commons.collections.CollectionUtils; +import org.openide.util.Exceptions; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; +import 
org.sleuthkit.autopsy.casemodule.events.DataSourceAddedEvent; +import org.sleuthkit.autopsy.casemodule.events.DataSourceNameChangedEvent; +import org.sleuthkit.autopsy.casemodule.events.HostsAddedEvent; +import org.sleuthkit.autopsy.casemodule.events.HostsAddedToPersonEvent; +import org.sleuthkit.autopsy.casemodule.events.HostsDeletedEvent; +import org.sleuthkit.autopsy.casemodule.events.HostsEvent; +import org.sleuthkit.autopsy.casemodule.events.HostsRemovedFromPersonEvent; +import org.sleuthkit.autopsy.casemodule.events.HostsUpdatedEvent; +import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventUtils; -import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.autopsy.mainui.datamodel.FileSystemRowDTO.DirectoryRowDTO; import org.sleuthkit.autopsy.mainui.datamodel.FileSystemRowDTO.ImageRowDTO; import org.sleuthkit.autopsy.mainui.datamodel.FileSystemRowDTO.VolumeRowDTO; @@ -49,9 +61,13 @@ import org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO.LayoutFileRowDTO; import org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO.SlackFileRowDTO; import org.sleuthkit.autopsy.mainui.datamodel.FileSystemRowDTO.PoolRowDTO; import static org.sleuthkit.autopsy.mainui.datamodel.ViewsDAO.getExtensionMediaType; +import org.sleuthkit.autopsy.mainui.datamodel.events.FileSystemContentEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.FileSystemHostEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.FileSystemPersonEvent; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.DataSource; import org.sleuthkit.datamodel.Directory; import org.sleuthkit.datamodel.Host; import org.sleuthkit.datamodel.Image; @@ -72,10 +88,26 @@ import org.sleuthkit.datamodel.Volume; */ public class FileSystemDAO extends AbstractDAO { + private static 
final Logger logger = Logger.getLogger(FileSystemDAO.class.getName()); + private static final int CACHE_SIZE = 15; // rule of thumb: 5 entries times number of cached SearchParams sub-types private static final long CACHE_DURATION = 2; private static final TimeUnit CACHE_DURATION_UNITS = TimeUnit.MINUTES; - private final Cache, BaseSearchResultsDTO> searchParamsCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).expireAfterAccess(CACHE_DURATION, CACHE_DURATION_UNITS).build(); + + private static final Set HOST_LEVEL_EVTS = ImmutableSet.of( + Case.Events.DATA_SOURCE_ADDED.toString(), + // this should trigger the case to be reopened + // Case.Events.DATA_SOURCE_DELETED.toString(), + Case.Events.DATA_SOURCE_NAME_CHANGED.toString(), + Case.Events.HOSTS_ADDED.toString(), + Case.Events.HOSTS_DELETED.toString(), + Case.Events.HOSTS_UPDATED.toString() + ); + + private static final Set PERSON_LEVEL_EVTS = ImmutableSet.of( + Case.Events.HOSTS_ADDED_TO_PERSON.toString(), + Case.Events.HOSTS_REMOVED_FROM_PERSON.toString() + ); private static final String FILE_SYSTEM_TYPE_ID = "FILE_SYSTEM"; @@ -88,23 +120,22 @@ public class FileSystemDAO extends AbstractDAO { return instance; } - public boolean isSystemContentInvalidating(FileSystemContentSearchParam key, Content eventContent) { - if (!(eventContent instanceof Content)) { - return false; - } - try { - return key.getContentObjectId() != eventContent.getParent().getId(); - } catch (TskCoreException ex) { - // There is nothing we can do with the exception. 
+ private final Cache, BaseSearchResultsDTO> searchParamsCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).expireAfterAccess(CACHE_DURATION, CACHE_DURATION_UNITS).build(); + + public boolean isSystemContentInvalidating(FileSystemContentSearchParam key, DAOEvent daoEvent) { + if (!(daoEvent instanceof FileSystemContentEvent)) { return false; } + + return key.getContentObjectId() == ((FileSystemContentEvent) daoEvent).getContentObjectId(); } - public boolean isSystemHostInvalidating(FileSystemHostSearchParam key, Host eventHost) { - if (!(eventHost instanceof Host)) { + public boolean isSystemHostInvalidating(FileSystemHostSearchParam key, DAOEvent daoEvent) { + if (!(daoEvent instanceof FileSystemHostEvent)) { return false; } - return key.getHostObjectId() != eventHost.getHostId(); + + return key.getHostObjectId() == ((FileSystemHostEvent) daoEvent).getHostObjectId(); } private BaseSearchResultsDTO fetchContentForTableFromContent(SearchParams cacheKey) throws NoCurrentCaseException, TskCoreException { @@ -301,44 +332,82 @@ public class FileSystemDAO extends AbstractDAO { this.searchParamsCache.invalidateAll(); } - private static final Set DATA_SOURCE_EVTS = ImmutableSet.of( - Case.Events.DATA_SOURCE_ADDED.toString(), - Case.Events.DATA_SOURCE_DELETED.toString(), - Case.Events.DATA_SOURCE_NAME_CHANGED.toString() - ); + private Long getHostFromDs(Content dataSource) { + if (!(dataSource instanceof DataSource)) { + return null; + } - private static final Set HOST_EVTS = ImmutableSet.of( - Case.Events.HOSTS_ADDED.toString(), - Case.Events.HOSTS_ADDED_TO_PERSON.toString(), - Case.Events.HOSTS_DELETED.toString(), - Case.Events.HOSTS_REMOVED_FROM_PERSON.toString(), - Case.Events.HOSTS_UPDATED.toString() - ); + try { + Host host = ((DataSource) dataSource).getHost(); + return host == null ? 
null : host.getHostId(); + } catch (TskCoreException ex) { + logger.log(Level.WARNING, "There was an error getting the host for data source with id: " + dataSource.getId(), ex); + return null; + } + } @Override List handleAutopsyEvent(Collection evts) { -// Set affectedPersons = new HashSet<>(); -// Set affectedHosts = new HashSet<>(); -// Set affectedParentContent = new HashSet<>(); -// -// for (PropertyChangeEvent evt : evts) { -// Content content = DAOEventUtils.getContentFromEvt(evt); -// if (content != null) { -// affectedParentContent.add(content.getParentId()); -// continue; -// } -// -// String propName = evt.getPropertyName(); -// if (DATA_SOURCE_EVTS.contains(propName)) { -// affectedHosts.add(evt.getHostId()); -// } else if (HOST_EVTS.contains(propName)) { -// affectedPersons.add(evt.getPersonId()); -// } -// } + Set affectedPersons = new HashSet<>(); + Set affectedHosts = new HashSet<>(); + Set affectedParentContent = new HashSet<>(); - // GVDTODO clear affected cache entries - // GVDTODO generate events - return Collections.emptyList(); + for (PropertyChangeEvent evt : evts) { + Content content = DAOEventUtils.getContentFromEvt(evt); + if (content != null && content.getParentId().isPresent()) { + affectedParentContent.add(content.getParentId().get()); + } else if (evt instanceof DataSourceAddedEvent) { + Long hostId = getHostFromDs(((DataSourceAddedEvent) evt).getDataSource()); + if (hostId != null) { + affectedHosts.add(hostId); + } + } else if (evt instanceof DataSourceNameChangedEvent) { + Long hostId = getHostFromDs(((DataSourceNameChangedEvent) evt).getDataSource()); + if (hostId != null) { + affectedHosts.add(hostId); + } + } else if (evt instanceof HostsAddedEvent) { + // GVDTODO how best to handle host added? + } else if (evt instanceof HostsUpdatedEvent) { + // GVDTODO how best to handle host updated? 
+ } else if (evt instanceof HostsAddedToPersonEvent) { + Person person = ((HostsAddedToPersonEvent) evt).getPerson(); + affectedPersons.add(person == null ? null : person.getPersonId()); + } else if (evt instanceof HostsRemovedFromPersonEvent) { + Person person = ((HostsRemovedFromPersonEvent) evt).getPerson(); + affectedPersons.add(person == null ? null : person.getPersonId()); + } + } + + // GVDTODO handling null ids versus the 'No Persons' option + ConcurrentMap, BaseSearchResultsDTO> concurrentMap = this.searchParamsCache.asMap(); + concurrentMap.forEach((k, v) -> { + Object searchParams = k.getParamData(); + if (searchParams instanceof FileSystemPersonSearchParam) { + FileSystemPersonSearchParam personParam = (FileSystemPersonSearchParam) searchParams; + if (affectedPersons.contains(personParam.getPersonObjectId())) { + concurrentMap.remove(k); + } + } else if (searchParams instanceof FileSystemHostSearchParam) { + FileSystemHostSearchParam hostParams = (FileSystemHostSearchParam) searchParams; + if (affectedHosts.contains(hostParams.getHostObjectId())) { + concurrentMap.remove(k); + } + } else if (searchParams instanceof FileSystemContentSearchParam) { + FileSystemContentSearchParam contentParams = (FileSystemContentSearchParam) searchParams; + if (affectedParentContent.contains(contentParams)) { + concurrentMap.remove(k); + } + } + }); + + return Stream.of( + affectedPersons.stream().map(id -> new FileSystemPersonEvent(id)), + affectedHosts.stream().map(id -> new FileSystemHostEvent(id)), + affectedParentContent.stream().map(id -> new FileSystemContentEvent(id)) + ) + .flatMap(s -> s) + .collect(Collectors.toList()); } /** @@ -365,17 +434,14 @@ public class FileSystemDAO extends AbstractDAO { @Override public boolean isRefreshRequired(DAOEvent evt) { - return this.dao.Content content = getContentFromEvt(evt); - if (content == null) { - return false; - } - - return MainDAO.getInstance().getFileSystemDAO().isSystemContentInvalidating(getParameters(), content); 
+ return this.dao.isSystemContentInvalidating(this.getParameters(), evt); } } public static class FileSystemHostFetcher extends DAOFetcher { + private final FileSystemDAO dao; + /** * Main constructor. * @@ -383,18 +449,17 @@ public class FileSystemDAO extends AbstractDAO { */ public FileSystemHostFetcher(FileSystemHostSearchParam params) { super(params); + this.dao = MainDAO.getInstance().getFileSystemDAO(); } @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return MainDAO.getInstance().getFileSystemDAO().getContentForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + return this.dao.getContentForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); } @Override - public boolean isRefreshRequired(PropertyChangeEvent evt) { - // TODO implement the method for determining if - // a refresh is needed. - return false; + public boolean isRefreshRequired(DAOEvent evt) { + return this.dao.isSystemHostInvalidating(this.getParameters(), evt); } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java index d8570472a9..2ab2b8280f 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java @@ -26,6 +26,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Comparator; +import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.Set; @@ -191,6 +192,7 @@ public class OsAccountsDAO extends AbstractDAO { List handleAutopsyEvent(Collection evts) { List daoEvts = evts.stream().filter(evt -> OS_EVENTS.contains(evt.getPropertyName())) .map(evt -> new OsAccountEvent()) + .limit(1) .collect(Collectors.toList()); if (!daoEvts.isEmpty()) { diff --git 
a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java index 192fca67d5..01ebcd605a 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java @@ -25,6 +25,7 @@ import java.beans.PropertyChangeEvent; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; @@ -326,6 +327,11 @@ public class TagsDAO extends AbstractDAO { .add(Optional.ofNullable(data.getRight())); } } + + // don't continue if no mapping entries + if (mapping.isEmpty()) { + return Collections.emptyList(); + } ConcurrentMap, SearchResultsDTO> concurrentMap = this.searchParamsCache.asMap(); concurrentMap.forEach((k, v) -> { diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java index dd2f73a97e..354e30c4cf 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java @@ -25,6 +25,7 @@ import java.beans.PropertyChangeEvent; import java.sql.SQLException; import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -55,7 +56,6 @@ import org.sleuthkit.autopsy.mainui.datamodel.events.FileTypeSizeEvent; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.CaseDbAccessManager.CaseDbPreparedStatement; -import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; @@ -676,12 +676,14 @@ public class ViewsDAO extends AbstractDAO { continue; } + // create an extension mapping if 
extension present if (!StringUtils.isBlank(af.getNameExtension())) { fileExtensionDsMap .computeIfAbsent(af.getNameExtension(), (k) -> new HashSet<>()) .add(af.getDataSourceObjectId()); } + // create a mime type mapping if mime type present if (!StringUtils.isBlank(af.getMIMEType())) { Pair mimePieces = getMimePieces(af.getMIMEType()); mimeTypeDsMap @@ -690,6 +692,7 @@ public class ViewsDAO extends AbstractDAO { .add(af.getDataSourceObjectId()); } + // create a size mapping if size present FileSizeFilter sizeFilter = Stream.of(FileSizeFilter.values()) .filter(filter -> af.getSize() >= filter.getMinBound() && af.getSize() < filter.getMaxBound()) .findFirst() @@ -702,48 +705,31 @@ public class ViewsDAO extends AbstractDAO { } } - // invalidate cache entries that are affected by events - ConcurrentMap, SearchResultsDTO> concurrentMap = this.searchParamsCache.asMap(); - concurrentMap.forEach((k, v) -> { - Object baseParams = k.getParamData(); - if (baseParams instanceof FileTypeExtensionsSearchParams) { - FileTypeExtensionsSearchParams extParams = (FileTypeExtensionsSearchParams) baseParams; - boolean isMatch = extParams.getFilter().getFilter().stream().anyMatch((ext) -> { - Set dsIds = fileExtensionDsMap.get(ext); - return (dsIds != null && (extParams.getDataSourceId() == null || dsIds.contains(extParams.getDataSourceId()))); - }); + if (fileExtensionDsMap.isEmpty() || mimeTypeDsMap.isEmpty() || fileSizeDsMap.isEmpty()) { + return Collections.emptyList(); + } - if (isMatch) { - concurrentMap.remove(k); - } - } else if (baseParams instanceof FileTypeMimeSearchParams) { - FileTypeMimeSearchParams mimeParams = (FileTypeMimeSearchParams) baseParams; - Pair mimePieces = getMimePieces(mimeParams.getMimeType()); - Map> suffixes = mimeTypeDsMap.get(mimePieces.getKey()); - if (suffixes == null) { - return; - } + clearRelevantCacheEntries(fileExtensionDsMap, mimeTypeDsMap, fileSizeDsMap); - if (mimePieces.getValue() == null - && (mimeParams.getDataSourceId() == null - || 
suffixes.values().stream().flatMap(set -> set.stream()).anyMatch(ds -> ds == mimeParams.getDataSourceId()))) { + return getDAOEvents(fileExtensionDsMap, mimeTypeDsMap, fileSizeDsMap); + } - concurrentMap.remove(k); - } else { - Set dataSources = suffixes.get(mimePieces.getValue()); - if (dataSources != null && (mimeParams.getDataSourceId() == null || dataSources.contains(mimeParams.getDataSourceId()))) { - concurrentMap.remove(k); - } - } - - } else if (baseParams instanceof FileTypeSizeSearchParams) { - FileTypeSizeSearchParams sizeParams = (FileTypeSizeSearchParams) baseParams; - Set dataSources = fileSizeDsMap.get(sizeParams.getSizeFilter()); - if (dataSources != null && (sizeParams.getDataSourceId() == null || dataSources.contains(sizeParams.getDataSourceId()))) { - concurrentMap.remove(k); - } - } - }); + /** + * + * Clears relevant cache entries from cache based on digest of autopsy + * events. + * + * @param fileExtensionDsMap Maps the file extension to the data sources + * where files were found with that extension. + * @param mimeTypeDsMap Maps the mime type to the data sources where + * files were found with that mime type. + * @param fileSizeDsMap Maps the size to the data sources where files + * + * @return The list of affected dao events. + */ + private List getDAOEvents(Map> fileExtensionDsMap, + Map>> mimeTypeDsMap, + Map> fileSizeDsMap) { Stream fileExtStream = fileExtensionDsMap.entrySet().stream() .flatMap(entry -> entry.getValue().stream().map(dsId -> new FileTypeExtensionsEvent(entry.getKey(), dsId))); @@ -768,6 +754,68 @@ public class ViewsDAO extends AbstractDAO { .collect(Collectors.toList()); } + /** + * Clears relevant cache entries from cache based on digest of autopsy + * events. + * + * @param fileExtensionDsMap Maps the file extension to the data sources + * where files were found with that extension. + * @param mimeTypeDsMap Maps the mime type to the data sources where + * files were found with that mime type. 
+ * @param fileSizeDsMap Maps the size to the data sources where files + * were found within that size filter. + */ + private void clearRelevantCacheEntries(Map> fileExtensionDsMap, + Map>> mimeTypeDsMap, + Map> fileSizeDsMap) { + + // invalidate cache entries that are affected by events + ConcurrentMap, SearchResultsDTO> concurrentMap = this.searchParamsCache.asMap(); + concurrentMap.forEach((k, v) -> { + Object baseParams = k.getParamData(); + if (baseParams instanceof FileTypeExtensionsSearchParams) { + FileTypeExtensionsSearchParams extParams = (FileTypeExtensionsSearchParams) baseParams; + // if search params have a filter where extension is present and the data source id is null or == + boolean isMatch = extParams.getFilter().getFilter().stream().anyMatch((ext) -> { + Set dsIds = fileExtensionDsMap.get(ext); + return (dsIds != null && (extParams.getDataSourceId() == null || dsIds.contains(extParams.getDataSourceId()))); + }); + + if (isMatch) { + concurrentMap.remove(k); + } + } else if (baseParams instanceof FileTypeMimeSearchParams) { + FileTypeMimeSearchParams mimeParams = (FileTypeMimeSearchParams) baseParams; + Pair mimePieces = getMimePieces(mimeParams.getMimeType()); + Map> suffixes = mimeTypeDsMap.get(mimePieces.getKey()); + if (suffixes == null) { + return; + } + + // if search params is top level mime prefix (without suffix) and data source is null or ==. 
+ if (mimePieces.getValue() == null + && (mimeParams.getDataSourceId() == null + || suffixes.values().stream().flatMap(set -> set.stream()).anyMatch(ds -> ds == mimeParams.getDataSourceId()))) { + + concurrentMap.remove(k); + // otherwise, see if suffix is present + } else { + Set dataSources = suffixes.get(mimePieces.getValue()); + if (dataSources != null && (mimeParams.getDataSourceId() == null || dataSources.contains(mimeParams.getDataSourceId()))) { + concurrentMap.remove(k); + } + } + + } else if (baseParams instanceof FileTypeSizeSearchParams) { + FileTypeSizeSearchParams sizeParams = (FileTypeSizeSearchParams) baseParams; + Set dataSources = fileSizeDsMap.get(sizeParams.getSizeFilter()); + if (dataSources != null && (sizeParams.getDataSourceId() == null || dataSources.contains(sizeParams.getDataSourceId()))) { + concurrentMap.remove(k); + } + } + }); + } + /** * Handles fetching and paging of data for file types by extension. */ diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemContentEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemContentEvent.java new file mode 100644 index 0000000000..1cb519bd16 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemContentEvent.java @@ -0,0 +1,66 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +import java.util.Objects; + +/** + * An event that affects the given parent content. + */ +public class FileSystemContentEvent implements DAOEvent { + private final Long contentObjectId; + + public FileSystemContentEvent(Long contentObjectId) { + this.contentObjectId = contentObjectId; + } + + public Long getContentObjectId() { + return contentObjectId; + } + + @Override + public int hashCode() { + int hash = 7; + hash = 67 * hash + Objects.hashCode(this.contentObjectId); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final FileSystemContentEvent other = (FileSystemContentEvent) obj; + if (!Objects.equals(this.contentObjectId, other.contentObjectId)) { + return false; + } + return true; + } + + @Override + public Type getType() { + return Type.RESULT; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemHostEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemHostEvent.java new file mode 100644 index 0000000000..1788463cc1 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemHostEvent.java @@ -0,0 +1,67 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +import java.util.Objects; + +/** + * An event that affects the given host. + */ +public class FileSystemHostEvent implements DAOEvent { + + private final Long hostObjectId; + + public FileSystemHostEvent(Long hostObjectId) { + this.hostObjectId = hostObjectId; + } + + public Long getHostObjectId() { + return hostObjectId; + } + + @Override + public int hashCode() { + int hash = 7; + hash = 67 * hash + Objects.hashCode(this.hostObjectId); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final FileSystemHostEvent other = (FileSystemHostEvent) obj; + if (!Objects.equals(this.hostObjectId, other.hostObjectId)) { + return false; + } + return true; + } + + @Override + public DAOEvent.Type getType() { + return DAOEvent.Type.RESULT; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemPersonEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemPersonEvent.java new file mode 100644 index 0000000000..bcb9db888f --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemPersonEvent.java @@ -0,0 +1,72 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +import java.util.Objects; + +/** + * An event that affects the given person. + */ +public class FileSystemPersonEvent implements DAOEvent { + + private final Long personObjectId; + + /** + * Main constructor. + * + * @param personObjectId May be null for hosts with no associated Person. + */ + public FileSystemPersonEvent(Long personObjectId) { + this.personObjectId = personObjectId; + } + + public Long getPersonObjectId() { + return personObjectId; + } + + @Override + public int hashCode() { + int hash = 7; + hash = 67 * hash + Objects.hashCode(this.personObjectId); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final FileSystemPersonEvent other = (FileSystemPersonEvent) obj; + if (!Objects.equals(this.personObjectId, other.personObjectId)) { + return false; + } + return true; + } + + @Override + public DAOEvent.Type getType() { + return DAOEvent.Type.RESULT; + } +} From c1646d7c0d6870933020e96eb776a8b86fb9f907 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Fri, 12 Nov 2021 14:21:09 -0500 Subject: [PATCH 041/142] bug fixes --- .../autopsy/mainui/datamodel/FileSystemDAO.java | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java index 6f24fd7543..c4214c3f70 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java @@ -60,6 +60,7 @@ import org.sleuthkit.autopsy.mainui.datamodel.events.FileSystemContentEvent; import 
org.sleuthkit.autopsy.mainui.datamodel.events.FileSystemHostEvent; import org.sleuthkit.autopsy.mainui.datamodel.events.FileSystemPersonEvent; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; +import org.sleuthkit.datamodel.AbstractContent; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.DataSource; @@ -349,8 +350,15 @@ public class FileSystemDAO extends AbstractDAO { for (PropertyChangeEvent evt : evts) { Content content = DAOEventUtils.getContentFromEvt(evt); - if (content != null && content.getParentId().isPresent()) { - affectedParentContent.add(content.getParentId().get()); + if (content instanceof AbstractContent) { + try { + Optional parentId = ((AbstractContent) content).getParentId(); + if (parentId.isPresent()) { + affectedParentContent.add(parentId.get()); + } + } catch (TskCoreException ex) { + logger.log(Level.WARNING, "An exception occurred getting the parent id of content: " + content.getId(), ex); + } } else if (evt instanceof DataSourceAddedEvent) { Long hostId = getHostFromDs(((DataSourceAddedEvent) evt).getDataSource()); if (hostId != null) { From 942689f28f20ba7526bc6a6b6ce64d652ebafb4b Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Fri, 12 Nov 2021 15:47:20 -0500 Subject: [PATCH 042/142] bug fixes --- .../mainui/datamodel/AnalysisResultDAO.java | 2 +- .../autopsy/mainui/datamodel/MainDAO.java | 29 ++++++-- .../autopsy/mainui/datamodel/TagsDAO.java | 69 ++++++++----------- 3 files changed, 53 insertions(+), 47 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java index 0589b180e1..7709ac26fb 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java @@ -740,7 +740,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { 
setMap.computeIfAbsent(Pair.of(art.getArtifactTypeID(), setName), (k) -> new HashSet<>()) .add(art.getDataSourceObjectID()); - } else if (BlackboardArtifact.Category.DATA_ARTIFACT.equals(art.getType().getCategory())) { + } else if (BlackboardArtifact.Category.ANALYSIS_RESULT.equals(art.getType().getCategory())) { analysisResultMap.computeIfAbsent(art.getArtifactTypeID(), (k) -> new HashSet<>()) .add(art.getDataSourceObjectID()); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java index 48551a565c..72a5011673 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java @@ -25,6 +25,7 @@ import com.google.common.collect.ImmutableList; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.beans.PropertyChangeSupport; +import java.util.Arrays; import java.util.Collection; import java.util.EnumSet; import java.util.List; @@ -33,6 +34,7 @@ import java.util.prefs.PreferenceChangeListener; import java.util.stream.Collectors; import java.util.stream.Stream; import org.apache.commons.collections.CollectionUtils; +import org.python.google.common.collect.ImmutableSet; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.core.UserPreferences; import org.sleuthkit.autopsy.ingest.IngestManager; @@ -43,8 +45,18 @@ import org.sleuthkit.autopsy.ingest.IngestManager; */ public class MainDAO extends AbstractDAO { - private static final Set INGEST_MODULE_EVENTS = EnumSet.of(IngestManager.IngestModuleEvent.CONTENT_CHANGED, IngestManager.IngestModuleEvent.DATA_ADDED); - private static final Set CASE_EVENTS = EnumSet.of(Case.Events.CURRENT_CASE); + private static final Set INGEST_MODULE_EVENTS = EnumSet.of( + IngestManager.IngestModuleEvent.CONTENT_CHANGED, + IngestManager.IngestModuleEvent.DATA_ADDED + ); + + private static final Set QUEUED_CASE_EVENTS = 
ImmutableSet.of( + Case.Events.OS_ACCOUNTS_ADDED.toString(), + Case.Events.OS_ACCOUNTS_UPDATED.toString(), + Case.Events.OS_ACCOUNTS_DELETED.toString(), + Case.Events.OS_ACCT_INSTANCES_ADDED.toString() + ); + private static final long MILLIS_BATCH = 5000; private static MainDAO instance = null; @@ -64,8 +76,11 @@ public class MainDAO extends AbstractDAO { private final PropertyChangeListener caseEventListener = (evt) -> { if (evt.getPropertyName().equals(Case.Events.CURRENT_CASE.toString())) { this.clearCaches(); + } else if (QUEUED_CASE_EVENTS.contains(evt.getPropertyName())) { + queueAutopsyEvent(evt); } else { - handleAutopsyEvent(evt); + // handle case events immediately + handleAutopsyEvent(Arrays.asList(evt)); } }; @@ -80,7 +95,7 @@ public class MainDAO extends AbstractDAO { * The ingest module event listener. */ private final PropertyChangeListener ingestModuleEventListener = (evt) -> { - handleAutopsyEvent(evt); + queueAutopsyEvent(evt); }; private final PropertyChangeSupport support = new PropertyChangeSupport(this); @@ -166,7 +181,7 @@ public class MainDAO extends AbstractDAO { */ void register() { IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS, ingestModuleEventListener); - Case.addEventTypeSubscriber(CASE_EVENTS, caseEventListener); + Case.addPropertyChangeListener(caseEventListener); UserPreferences.addChangeListener(userPreferenceListener); } @@ -180,7 +195,7 @@ public class MainDAO extends AbstractDAO { */ void unregister() { IngestManager.getInstance().removeIngestModuleEventListener(INGEST_MODULE_EVENTS, ingestModuleEventListener); - Case.removeEventTypeSubscriber(CASE_EVENTS, caseEventListener); + Case.removePropertyChangeListener(caseEventListener); UserPreferences.removeChangeListener(userPreferenceListener); } @@ -189,7 +204,7 @@ public class MainDAO extends AbstractDAO { * * @param autopsyEvent The autopsy event. 
*/ - private void handleAutopsyEvent(PropertyChangeEvent autopsyEvent) { + private void queueAutopsyEvent(PropertyChangeEvent autopsyEvent) { this.eventBatcher.queueEvent(autopsyEvent); } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java index 01ebcd605a..43abd670e1 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java @@ -327,7 +327,7 @@ public class TagsDAO extends AbstractDAO { .add(Optional.ofNullable(data.getRight())); } } - + // don't continue if no mapping entries if (mapping.isEmpty()) { return Collections.emptyList(); @@ -369,44 +369,35 @@ public class TagsDAO extends AbstractDAO { * from event). */ private Triple getTagData(PropertyChangeEvent evt) { + if (evt instanceof BlackBoardArtifactTagAddedEvent) { + BlackBoardArtifactTagAddedEvent event = (BlackBoardArtifactTagAddedEvent) evt; + // ensure tag added event has a valid content id + if (event.getAddedTag() != null + && event.getAddedTag().getContent() != null + && event.getAddedTag().getArtifact() != null) { + return Triple.of(TagType.RESULT, event.getAddedTag().getName().getId(), event.getAddedTag().getArtifact().getDataSourceObjectID()); + } - String eventType = evt.getPropertyName(); - - if (eventType.equals(Case.Events.BLACKBOARD_ARTIFACT_TAG_ADDED.toString()) - || eventType.equals(Case.Events.BLACKBOARD_ARTIFACT_TAG_DELETED.toString()) - || eventType.equals(Case.Events.CONTENT_TAG_ADDED.toString()) - || eventType.equals(Case.Events.CONTENT_TAG_DELETED.toString())) { - - if (evt instanceof BlackBoardArtifactTagAddedEvent) { - BlackBoardArtifactTagAddedEvent event = (BlackBoardArtifactTagAddedEvent) evt; - // ensure tag added event has a valid content id - if (event.getAddedTag() != null - && event.getAddedTag().getContent() != null - && event.getAddedTag().getArtifact() != null) { - return Triple.of(TagType.RESULT, 
event.getAddedTag().getName().getId(), event.getAddedTag().getArtifact().getDataSourceObjectID()); - } - - } else if (evt instanceof BlackBoardArtifactTagDeletedEvent) { - BlackBoardArtifactTagDeletedEvent event = (BlackBoardArtifactTagDeletedEvent) evt; - BlackBoardArtifactTagDeletedEvent.DeletedBlackboardArtifactTagInfo deletedTagInfo = event.getDeletedTagInfo(); - if (deletedTagInfo != null) { - return Triple.of(TagType.RESULT, deletedTagInfo.getName().getId(), null); - } - } else if (evt instanceof ContentTagAddedEvent) { - ContentTagAddedEvent event = (ContentTagAddedEvent) evt; - // ensure tag added event has a valid content id - if (event.getAddedTag() != null && event.getAddedTag().getContent() != null) { - Content content = event.getAddedTag().getContent(); - Long dsId = content instanceof AbstractFile ? ((AbstractFile) content).getDataSourceObjectId() : null; - return Triple.of(TagType.FILE, event.getAddedTag().getName().getId(), dsId); - } - } else if (evt instanceof ContentTagDeletedEvent) { - ContentTagDeletedEvent event = (ContentTagDeletedEvent) evt; - // ensure tag deleted event has a valid content id - ContentTagDeletedEvent.DeletedContentTagInfo deletedTagInfo = event.getDeletedTagInfo(); - if (deletedTagInfo != null) { - return Triple.of(TagType.FILE, deletedTagInfo.getName().getId(), null); - } + } else if (evt instanceof BlackBoardArtifactTagDeletedEvent) { + BlackBoardArtifactTagDeletedEvent event = (BlackBoardArtifactTagDeletedEvent) evt; + BlackBoardArtifactTagDeletedEvent.DeletedBlackboardArtifactTagInfo deletedTagInfo = event.getDeletedTagInfo(); + if (deletedTagInfo != null) { + return Triple.of(TagType.RESULT, deletedTagInfo.getName().getId(), null); + } + } else if (evt instanceof ContentTagAddedEvent) { + ContentTagAddedEvent event = (ContentTagAddedEvent) evt; + // ensure tag added event has a valid content id + if (event.getAddedTag() != null && event.getAddedTag().getContent() != null) { + Content content = 
event.getAddedTag().getContent(); + Long dsId = content instanceof AbstractFile ? ((AbstractFile) content).getDataSourceObjectId() : null; + return Triple.of(TagType.FILE, event.getAddedTag().getName().getId(), dsId); + } + } else if (evt instanceof ContentTagDeletedEvent) { + ContentTagDeletedEvent event = (ContentTagDeletedEvent) evt; + // ensure tag deleted event has a valid content id + ContentTagDeletedEvent.DeletedContentTagInfo deletedTagInfo = event.getDeletedTagInfo(); + if (deletedTagInfo != null) { + return Triple.of(TagType.FILE, deletedTagInfo.getName().getId(), null); } } return null; @@ -418,7 +409,7 @@ public class TagsDAO extends AbstractDAO { public static class TagFetcher extends DAOFetcher { private final TagsDAO dao; - + /** * Main constructor. * From 9a63ce2b366a815cd46f50c853a1c948014d7b4d Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Mon, 15 Nov 2021 13:25:29 -0500 Subject: [PATCH 043/142] 7895 CR data artifact ingest module --- .../CentralRepoDataArtifactIngestModule.java | 187 ++++++++---------- .../SevenZipExtractor.java | 4 +- .../filetypeid/FileTypeIdIngestModule.java | 3 +- .../FilesIdentifierIngestModule.java | 3 +- 4 files changed, 90 insertions(+), 107 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java index 74a42a3f43..dd568fe840 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java @@ -20,6 +20,7 @@ package org.sleuthkit.autopsy.centralrepository.ingestmodule; import java.util.ArrayList; import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.List; import java.util.Set; import java.util.logging.Level; @@ -42,12 +43,10 @@ import static 
org.sleuthkit.autopsy.centralrepository.ingestmodule.CentralRepoIn import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.DataArtifactIngestModule; import org.sleuthkit.autopsy.ingest.IngestJobContext; -import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.DataArtifact; import org.sleuthkit.datamodel.Image; import org.sleuthkit.datamodel.OsAccount; import org.sleuthkit.datamodel.OsAccountManager; -import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; @@ -65,6 +64,7 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo private final boolean flagPrevSeenDevices; private final boolean flagUniqueArtifacts; private final boolean saveCorrAttrInstances; + private final Set corrAttrValuesProcessed; private CentralRepository centralRepo; private IngestJobContext context; @@ -82,6 +82,7 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo flagPrevSeenDevices = settings.isFlagPreviousDevices(); flagUniqueArtifacts = settings.isFlagUniqueArtifacts(); saveCorrAttrInstances = settings.shouldCreateCorrelationProperties(); + corrAttrValuesProcessed = new LinkedHashSet<>(); } @NbBundle.Messages({ @@ -130,90 +131,27 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo */ @Override public ProcessResult process(DataArtifact artifact) { - analyze(artifact); + if (!flagNotableItems && !flagPrevSeenDevices && !flagUniqueArtifacts && !saveCorrAttrInstances) { + for (CorrelationAttributeInstance corrAttr : CorrelationAttributeUtil.makeCorrAttrsToSave(artifact)) { + if (corrAttrValuesProcessed.add(corrAttr.toString())) { + /* + * The correlation attribute is not in set yet, so it has + * not been processed yet. 
+ */ + makeAnalysisResults(artifact, corrAttr); + if (saveCorrAttrInstances) { + try { + centralRepo.addAttributeInstanceBulk(corrAttr); + } catch (CentralRepoException ex) { + LOGGER.log(Level.SEVERE, String.format("Error adding correlation attribute '%s' to central repository for '%s' (job ID=%d)", corrAttr, artifact, context.getJobId()), ex); //NON-NLS + } + } + } + } + } return ProcessResult.OK; } - @Override - public void shutDown() { - analyzeOsAccounts(); - if (saveCorrAttrInstances) { - try { - centralRepo.commitAttributeInstancesBulk(); - } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, String.format("Error doing final bulk commit of correlation attributes (job ID=%d)", context.getJobId()), ex); // NON-NLS - } - } - syncDataSourceHashes(); - } - - /** - * Translates the attributes of an OS account into central repository - * correlation attributes and uses them to create analysis results and new - * central repository correlation attribute instances, depending on ingest - * job settings. 
- */ - @NbBundle.Messages({ - "CentralRepoIngestModule_prevSeenOsAcctSetName=Users seen in previous cases", - "CentralRepoIngestModule_prevSeenOsAcctConfig=Previously Seen Users (Central Repository)" - }) - private void analyzeOsAccounts() { - if (saveCorrAttrInstances || flagPrevSeenDevices) { - try { - Case currentCase = Case.getCurrentCaseThrows(); - SleuthkitCase tskCase = currentCase.getSleuthkitCase(); - OsAccountManager osAccountMgr = tskCase.getOsAccountManager(); - List osAccounts = osAccountMgr.getOsAccountsByDataSourceObjId(context.getDataSource().getId()); - for (OsAccount osAccount : osAccounts) { - analyze(osAccount); - } - } catch (NoCurrentCaseException | TskCoreException ex) { - LOGGER.log(Level.SEVERE, String.format("Error getting OS accounts for data source %s (job ID=%d)", context.getDataSource(), context.getJobId()), ex); - } - } - } - - /** - * Translates the attributes of a data artifact or an OS account into - * central repository correlation attributes and uses them to create - * analysis results and new central repository correlation attribute - * instances, depending on ingest job settings. - * - * @param content The artifact or account. 
- */ - private void analyze(Content content) { - if (content == null || (!flagNotableItems && !flagPrevSeenDevices && !flagUniqueArtifacts && !saveCorrAttrInstances)) { - return; - } - - DataArtifact artifact = null; - OsAccount osAccount = null; - List corrAttrs = new ArrayList<>(); - if (content instanceof DataArtifact) { - artifact = (DataArtifact) content; - corrAttrs.addAll(CorrelationAttributeUtil.makeCorrAttrsToSave(artifact)); - } else { - osAccount = (OsAccount) content; - corrAttrs.addAll(CorrelationAttributeUtil.makeCorrAttrsToSave(osAccount, context.getDataSource())); - } - - for (CorrelationAttributeInstance corrAttr : corrAttrs) { - if (artifact != null) { - makeAnalysisResults(artifact, corrAttr); - } else { - makeAnalysisResults(osAccount, corrAttr); - } - - if (saveCorrAttrInstances) { - try { - centralRepo.addAttributeInstanceBulk(corrAttr); - } catch (CentralRepoException ex) { - LOGGER.log(Level.SEVERE, String.format("Error adding correlation attribute '%s' to central repository for (object ID=%d, job ID=%d)", corrAttr, content.getId(), context.getJobId()), ex); //NON-NLS - } - } - } - } - /** * Makes analysis results for a data artifact based on previous occurrences, * if any, of a correlation attribute. @@ -270,7 +208,72 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo } /** - * Makes analysis results for a data artifact based on previous occurrences, + * Gets a unique set of previous cases, represented by their names, from a + * list of previous occurrences of correlation attributes. + * + * @param previousOccurrences The correlations attributes. + * + * @return The names of the previous cases. 
+ */ + private Set getPreviousCases(List previousOccurrences) { + Set previousCases = new HashSet<>(); + for (CorrelationAttributeInstance occurrence : previousOccurrences) { + previousCases.add(occurrence.getCorrelationCase().getDisplayName()); + } + return previousCases; + } + + @Override + public void shutDown() { + analyzeOsAccounts(); + if (saveCorrAttrInstances) { + try { + centralRepo.commitAttributeInstancesBulk(); + } catch (CentralRepoException ex) { + LOGGER.log(Level.SEVERE, String.format("Error doing final bulk commit of correlation attributes (job ID=%d)", context.getJobId()), ex); // NON-NLS + } + } + syncDataSourceHashes(); + } + + /** + * Queries the case database for any OS accounts assoicated with the data + * source for the ingest job. The attributes of any OS account returned by + * the query are translated into central repository correlation attributes + * and used them to create analysis results and new central repository + * correlation attribute instances, depending on ingest job settings. 
+ */ + @NbBundle.Messages({ + "CentralRepoIngestModule_prevSeenOsAcctSetName=Users seen in previous cases", + "CentralRepoIngestModule_prevSeenOsAcctConfig=Previously Seen Users (Central Repository)" + }) + private void analyzeOsAccounts() { + if (saveCorrAttrInstances || flagPrevSeenDevices) { + try { + OsAccountManager osAccountMgr = Case.getCurrentCaseThrows().getSleuthkitCase().getOsAccountManager(); + List osAccounts = osAccountMgr.getOsAccountsByDataSourceObjId(context.getDataSource().getId()); + for (OsAccount osAccount : osAccounts) { + for (CorrelationAttributeInstance corrAttr : CorrelationAttributeUtil.makeCorrAttrsToSave(osAccount, context.getDataSource())) { + if (flagPrevSeenDevices) { + makeAnalysisResults(osAccount, corrAttr); + } + if (saveCorrAttrInstances) { + try { + centralRepo.addAttributeInstanceBulk(corrAttr); + } catch (CentralRepoException ex) { + LOGGER.log(Level.SEVERE, String.format("Error adding correlation attribute '%s' to central repository for '%s'(job ID=%d)", corrAttr, osAccount, context.getJobId()), ex); //NON-NLS + } + } + } + } + } catch (NoCurrentCaseException | TskCoreException ex) { + LOGGER.log(Level.SEVERE, String.format("Error getting OS accounts for data source %s (job ID=%d)", context.getDataSource(), context.getJobId()), ex); + } + } + } + + /** + * Makes analysis results for an OS Account based on previous occurrences, * if any, of a correlation attribute. * * @param artifact The data artifact. @@ -288,22 +291,6 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo } } - /** - * Gets a unique set of previous cases, represented by their names, from a - * list of previous occurrences of correlation attributes. - * - * @param previousOccurrences The correlations attributes. - * - * @return The names of the previous cases. 
- */ - private Set getPreviousCases(List previousOccurrences) { - Set previousCases = new HashSet<>(); - for (CorrelationAttributeInstance occurrence : previousOccurrences) { - previousCases.add(occurrence.getCorrelationCase().getDisplayName()); - } - return previousCases; - } - /** * Ensures the data source in the central repository has hash values that * match those in the case database. diff --git a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java index 3e6e4a9b07..1d53771dd8 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java +++ b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java @@ -69,7 +69,6 @@ import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; -import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ITEM; import org.sleuthkit.datamodel.BlackboardAttribute; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION; @@ -327,8 +326,7 @@ class SevenZipExtractor { TSK_COMMENT, MODULE_NAME, details)); - if (!blackboard.artifactExists(archiveFile, TSK_INTERESTING_ITEM, attributes)) { - + if (!blackboard.artifactExists(archiveFile, BlackboardArtifact.Type.TSK_INTERESTING_ITEM, attributes)) { BlackboardArtifact artifact = rootArchive.getArchiveFile().newAnalysisResult( BlackboardArtifact.Type.TSK_INTERESTING_ITEM, Score.SCORE_LIKELY_NOTABLE, null, setName, null, diff --git a/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java index 78b8070852..a2c37ce005 100644 --- 
a/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java @@ -36,7 +36,6 @@ import org.sleuthkit.autopsy.modules.filetypeid.CustomFileTypesManager.CustomFil import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; -import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ITEM; import org.sleuthkit.datamodel.BlackboardAttribute; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME; @@ -194,7 +193,7 @@ public class FileTypeIdIngestModule implements FileIngestModule { Blackboard tskBlackboard = currentCase.getSleuthkitCase().getBlackboard(); // Create artifact if it doesn't already exist. - if (!tskBlackboard.artifactExists(file, TSK_INTERESTING_ITEM, attributes)) { + if (!tskBlackboard.artifactExists(file, BlackboardArtifact.Type.TSK_INTERESTING_ITEM, attributes)) { BlackboardArtifact artifact = file.newAnalysisResult( BlackboardArtifact.Type.TSK_INTERESTING_ITEM, Score.SCORE_LIKELY_NOTABLE, null, fileType.getInterestingFilesSetName(), null, diff --git a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesIdentifierIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesIdentifierIngestModule.java index 284b595dc0..8ee1cf1589 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesIdentifierIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesIdentifierIngestModule.java @@ -39,7 +39,6 @@ import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; -import static 
org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ITEM; import org.sleuthkit.datamodel.BlackboardAttribute; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME; @@ -142,7 +141,7 @@ final class FilesIdentifierIngestModule implements FileIngestModule { ); // Create artifact if it doesn't already exist. - if (!blackboard.artifactExists(file, TSK_INTERESTING_ITEM, attributes)) { + if (!blackboard.artifactExists(file, BlackboardArtifact.Type.TSK_INTERESTING_ITEM, attributes)) { BlackboardArtifact artifact = file.newAnalysisResult( BlackboardArtifact.Type.TSK_INTERESTING_ITEM, Score.SCORE_LIKELY_NOTABLE, null, filesSet.getName(), null, From e238fb9c9b64f6332aad1ab5f84e7a6e41eb1561 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Wed, 17 Nov 2021 07:36:07 -0500 Subject: [PATCH 044/142] 7895 CR data artifact ingest module --- .../CentralRepoDataArtifactIngestModule.java | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java index dd568fe840..0729cad326 100755 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoDataArtifactIngestModule.java @@ -18,7 +18,6 @@ */ package org.sleuthkit.autopsy.centralrepository.ingestmodule; -import java.util.ArrayList; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; @@ -64,7 +63,7 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo private final boolean flagPrevSeenDevices; private final boolean flagUniqueArtifacts; private final boolean saveCorrAttrInstances; - 
private final Set corrAttrValuesProcessed; + private final Set corrAttrValuesAlreadyProcessed; private CentralRepository centralRepo; private IngestJobContext context; @@ -82,7 +81,7 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo flagPrevSeenDevices = settings.isFlagPreviousDevices(); flagUniqueArtifacts = settings.isFlagUniqueArtifacts(); saveCorrAttrInstances = settings.shouldCreateCorrelationProperties(); - corrAttrValuesProcessed = new LinkedHashSet<>(); + corrAttrValuesAlreadyProcessed = new LinkedHashSet<>(); } @NbBundle.Messages({ @@ -131,13 +130,9 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo */ @Override public ProcessResult process(DataArtifact artifact) { - if (!flagNotableItems && !flagPrevSeenDevices && !flagUniqueArtifacts && !saveCorrAttrInstances) { + if (flagNotableItems || flagPrevSeenDevices || flagUniqueArtifacts || saveCorrAttrInstances) { for (CorrelationAttributeInstance corrAttr : CorrelationAttributeUtil.makeCorrAttrsToSave(artifact)) { - if (corrAttrValuesProcessed.add(corrAttr.toString())) { - /* - * The correlation attribute is not in set yet, so it has - * not been processed yet. 
- */ + if (corrAttrValuesAlreadyProcessed.add(corrAttr.toString())) { makeAnalysisResults(artifact, corrAttr); if (saveCorrAttrInstances) { try { @@ -267,7 +262,7 @@ public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestMo } } } catch (NoCurrentCaseException | TskCoreException ex) { - LOGGER.log(Level.SEVERE, String.format("Error getting OS accounts for data source %s (job ID=%d)", context.getDataSource(), context.getJobId()), ex); + LOGGER.log(Level.SEVERE, String.format("Error getting OS accounts for data source '%s' (job ID=%d)", context.getDataSource(), context.getJobId()), ex); } } } From 32f1cd031bafd5c894568716e5bf67dd02c06eac Mon Sep 17 00:00:00 2001 From: apriestman Date: Wed, 17 Nov 2021 12:55:46 -0500 Subject: [PATCH 045/142] Switching to new tree nodes --- .../datamodel/DataSourceFilesNode.java | 4 +- .../datamodel/DataSourceGroupingNode.java | 7 +- .../autopsy/datamodel/DataSourcesNode.java | 1 + .../sleuthkit/autopsy/datamodel/HostNode.java | 5 +- .../DirectoryTreeFilterChildren.java | 6 +- .../autopsy/ingest/Bundle.properties-MERGED | 12 + .../mainui/datamodel/Bundle.properties-MERGED | 2 + .../datamodel/FileSystemColumnUtils.java | 117 +++- .../mainui/datamodel/FileSystemDAO.java | 111 +++- .../mainui/datamodel/TreeResultsDTO.java | 17 +- .../mainui/nodes/Bundle.properties-MERGED | 1 + .../autopsy/mainui/nodes/DirectoryNode.java | 4 +- .../autopsy/mainui/nodes/FileNode.java | 10 +- .../mainui/nodes/FileSystemFactory.java | 589 ++++++++++++++++++ .../autopsy/mainui/nodes/ImageNode.java | 2 +- .../autopsy/mainui/nodes/NodeIconUtil.java | 68 ++ .../autopsy/mainui/nodes/PoolNode.java | 2 +- .../autopsy/mainui/nodes/VolumeNode.java | 2 +- 18 files changed, 938 insertions(+), 22 deletions(-) create mode 100644 Core/src/org/sleuthkit/autopsy/mainui/nodes/FileSystemFactory.java create mode 100644 Core/src/org/sleuthkit/autopsy/mainui/nodes/NodeIconUtil.java diff --git 
a/Core/src/org/sleuthkit/autopsy/datamodel/DataSourceFilesNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/DataSourceFilesNode.java index b7e33860b9..af5db5b700 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/DataSourceFilesNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/DataSourceFilesNode.java @@ -34,6 +34,7 @@ import org.openide.util.lookup.Lookups; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.mainui.nodes.FileSystemFactory; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskDataException; @@ -69,7 +70,8 @@ public class DataSourceFilesNode extends DisplayableItemNode { } public DataSourceFilesNode(long dsObjId) { - super(Children.create(new DataSourcesNodeChildren(dsObjId), true), Lookups.singleton(NAME)); + //super(Children.create(new DataSourcesNodeChildren(dsObjId), true), Lookups.singleton(NAME)); + super(Children.create(new FileSystemFactory.DataSourceFactory(dsObjId), true), Lookups.singleton(NAME)); displayName = (dsObjId > 0) ? 
NbBundle.getMessage(DataSourceFilesNode.class, "DataSourcesNode.group_by_datasource.name") : NAME; init(); } diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/DataSourceGroupingNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/DataSourceGroupingNode.java index 8e99aa2172..13cbd65159 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/DataSourceGroupingNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/DataSourceGroupingNode.java @@ -22,10 +22,12 @@ import java.util.Arrays; import java.util.Collections; import java.util.Optional; import java.util.logging.Level; +import org.openide.nodes.Children; import org.openide.util.lookup.Lookups; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.mainui.nodes.FileSystemFactory; import org.sleuthkit.datamodel.DataSource; import org.sleuthkit.datamodel.Image; import org.sleuthkit.datamodel.LocalFilesDataSource; @@ -48,7 +50,10 @@ class DataSourceGroupingNode extends DisplayableItemNode { super(Optional.ofNullable(createDSGroupingNodeChildren(dataSource)) .orElse(new RootContentChildren(Arrays.asList(Collections.EMPTY_LIST))), Lookups.singleton(dataSource)); - + // TODO other part + //super(Children.create(new FileSystemFactory(dataSource.getId()), true), + // Lookups.singleton(dataSource)); + if (dataSource instanceof Image) { Image image = (Image) dataSource; diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/DataSourcesNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/DataSourcesNode.java index e735eca3d9..5c0c9720b4 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/DataSourcesNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/DataSourcesNode.java @@ -36,6 +36,7 @@ import org.openide.util.lookup.Lookups; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.Logger; 
+import org.sleuthkit.autopsy.mainui.nodes.FileSystemFactory; import org.sleuthkit.datamodel.TskCoreException; /** diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/HostNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/HostNode.java index f043463268..8b71494b76 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/HostNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/HostNode.java @@ -46,6 +46,7 @@ import org.sleuthkit.autopsy.datamodel.hosts.AssociatePersonsMenuAction; import org.sleuthkit.autopsy.datamodel.hosts.MergeHostMenuAction; import org.sleuthkit.autopsy.datamodel.hosts.RemoveParentPersonAction; import org.sleuthkit.autopsy.mainui.datamodel.FileSystemHostSearchParam; +import org.sleuthkit.autopsy.mainui.nodes.FileSystemFactory; import org.sleuthkit.autopsy.corecomponents.SelectionResponder; import org.sleuthkit.datamodel.DataSource; import org.sleuthkit.datamodel.Host; @@ -221,7 +222,9 @@ public class HostNode extends DisplayableItemNode implements SelectionResponder{ * @param hosts The HostDataSources key. 
*/ HostNode(HostDataSources hosts) { - this(Children.create(new HostGroupingChildren(HOST_DATA_SOURCES, hosts.getHost()), true), hosts.getHost()); + //super(Children.create(new FileSystemFactory(dsObjId), true), Lookups.singleton(NAME)); + this(Children.create(new FileSystemFactory(hosts.getHost()), true), hosts.getHost()); + //this(Children.create(new HostGroupingChildren(HOST_DATA_SOURCES, hosts.getHost()), true), hosts.getHost()); } /** diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeFilterChildren.java b/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeFilterChildren.java index 19ebb3ea5a..ac7b67108e 100644 --- a/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeFilterChildren.java +++ b/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeFilterChildren.java @@ -43,6 +43,7 @@ import org.sleuthkit.autopsy.datamodel.LocalDirectoryNode; import org.sleuthkit.autopsy.datamodel.SlackFileNode; import org.sleuthkit.autopsy.datamodel.VirtualDirectoryNode; import org.sleuthkit.autopsy.datamodel.VolumeNode; +import org.sleuthkit.autopsy.mainui.nodes.FileSystemFactory; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import org.sleuthkit.datamodel.Content; @@ -102,7 +103,10 @@ class DirectoryTreeFilterChildren extends FilterNode.Children { return new Node[]{cloned}; } else if (origNode instanceof FileSize.FileSizeRootNode) { Node cloned = ((FileSize.FileSizeRootNode) origNode).clone(); - return new Node[]{cloned}; + return new Node[]{cloned}; + } else if (origNode instanceof FileSystemFactory.FileSystemTreeNode) { + Node cloned = ((FileSystemFactory.FileSystemTreeNode) origNode).clone(); + return new Node[]{cloned}; } else if (origNode == null || !(origNode instanceof DisplayableItemNode)) { return new Node[]{}; } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties-MERGED index 
11fbd0a9d8..ffe7393fa8 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties-MERGED @@ -1,4 +1,12 @@ +# {0} - AbstractFileName +AnalysisResultTestIngestModule_process_start=Started {0} +AnalysisResultTestIngestModuleFactory_Description=Just makes a custom data artifact and analysis result +AnalysisResultTestIngestModuleFactory_Name=Analysis Result Test Module CTL_RunIngestAction=Run Ingest +CustomGPSIngestModule_Description=Just makes a few custom artifacts with GPS data +CustomGPSIngestModule_Name=Custom GPS Module +# {0} - AbstractFileName +CustomGPSIngestModule_process_start=Started {0} FileIngestPipeline_SaveResults_Activity=Saving Results IngestJobSettingsPanel.IngestModulesTableRenderer.info.message=A previous version of this ingest module has been run before on this data source. IngestJobSettingsPanel.IngestModulesTableRenderer.warning.message=This ingest module has been run before on this data source. @@ -159,3 +167,7 @@ IngestSettingsPanel.restartRequiredLabel.text=For this computer, a maximum of {0 IngestSettingsPanel.jLabelNumThreads.text=Number of threads to use for file ingest: IngestSettingsPanel.ingestWarningLabel.text=Ingest is ongoing, some settings will be unavailable until it finishes. ProfileSettingsPanel.ingestWarningLabel.text=Ingest is ongoing, some settings will be unavailable until it finishes. 
+# {0} - AbstractFileName +SampleDSIngestModule_process_start=Started {0} +SampleDSIngestModuleFactory_Description=Creates a custom artifact every half second +SampleDSIngestModuleFactory_Name=\ Sample DS Test Module diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/Bundle.properties-MERGED index ab71f5d900..6f035cda36 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/Bundle.properties-MERGED @@ -71,6 +71,8 @@ FileSystemColumnUtils.abstractFileColumns.sizeColLbl=Size FileSystemColumnUtils.abstractFileColumns.typeDirColLbl=Type(Dir) FileSystemColumnUtils.abstractFileColumns.typeMetaColLbl=Type(Meta) FileSystemColumnUtils.abstractFileColumns.useridColLbl=UserID +FileSystemColumnUtils.getContentName.dotDir=[current folder] +FileSystemColumnUtils.getContentName.dotDotDir=[parent folder] FileSystemColumnUtils.imageColumns.devID=Device ID FileSystemColumnUtils.imageColumns.sectorSize=Sector Size (Bytes) FileSystemColumnUtils.imageColumns.size=Size (Bytes) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemColumnUtils.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemColumnUtils.java index 82648f4db2..ce8a5fdf3e 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemColumnUtils.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemColumnUtils.java @@ -18,29 +18,41 @@ */ package org.sleuthkit.autopsy.mainui.datamodel; +import java.sql.ResultSet; +import java.sql.SQLException; import java.util.Arrays; import java.util.ArrayList; import java.util.Collections; +import java.util.Iterator; import java.util.List; import java.util.logging.Level; import org.openide.util.NbBundle.Messages; import org.apache.commons.lang3.StringUtils; +import org.openide.util.NbBundle; +import org.sleuthkit.autopsy.core.UserPreferences; +import 
org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.TimeZoneUtils; +import org.sleuthkit.autopsy.datamodel.ContentUtils; import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.FileSystem; import org.sleuthkit.datamodel.Host; import org.sleuthkit.datamodel.Image; import org.sleuthkit.datamodel.Pool; +import org.sleuthkit.datamodel.SlackFile; +import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.Volume; import org.sleuthkit.datamodel.VolumeSystem; import org.sleuthkit.datamodel.TskCoreException; +import org.sleuthkit.datamodel.TskData; /** * Utility class for creating consistent table data. */ -class FileSystemColumnUtils { +public class FileSystemColumnUtils { private static final Logger logger = Logger.getLogger(FileSystemColumnUtils.class.getName()); @@ -288,11 +300,25 @@ class FileSystemColumnUtils { return pool.getType().getName(); // We currently use the type name for both the name and type fields } else if (content instanceof AbstractFile) { AbstractFile file = (AbstractFile)content; - return file.getName(); // GVDTODO handle . and .. from getContentDisplayName() + return convertDotDirName(file); } return content.getName(); } + + @NbBundle.Messages({ + "FileSystemColumnUtils.getContentName.dotDir=[current folder]", + "FileSystemColumnUtils.getContentName.dotDotDir=[parent folder]", + }) + public static String convertDotDirName(AbstractFile file) { + if (file.getName().equals("..")) { + return Bundle.FileSystemColumnUtils_getContentName_dotDotDir(); + } else if (file.getName().equals(".")) { + return Bundle.FileSystemColumnUtils_getContentName_dotDir(); + } + return file.getName(); + } + /** * Get the column keys for an abstract file object. 
* Only use this method if all rows contain AbstractFile objects. @@ -490,8 +516,27 @@ class FileSystemColumnUtils { * * @return List of content to add to the table. */ - static List getNextDisplayableContent(Content content) throws TskCoreException { + static List getDisplayableContentForTable(Content content) throws TskCoreException { + if (content instanceof AbstractFile) { + AbstractFile file = (AbstractFile)content; + // Skip known files if requested + if (UserPreferences.hideKnownFilesInDataSourcesTree() + && file.getKnown().equals(TskData.FileKnown.KNOWN)) { + return new ArrayList<>(); + } + + // Skip slack files if requested + if (UserPreferences.hideSlackFilesInDataSourcesTree() + && file instanceof SlackFile) { + return new ArrayList<>(); + } + } + + return getDisplayableContentForTableAndTree(content); + } + + static List getDisplayableContentForTableAndTree(Content content) throws TskCoreException { // If the given content is displayable, return it if (FileSystemColumnUtils.isDisplayable(content)) { return Arrays.asList(content); @@ -541,4 +586,70 @@ class FileSystemColumnUtils { return new ColumnKey(name, name, Bundle.FileSystemColumnUtils_noDescription()); } + public static List getVisibleTreeNodeChildren(Long contentId) throws TskCoreException, NoCurrentCaseException { + SleuthkitCase skCase = Case.getCurrentCaseThrows().getSleuthkitCase(); + Content content = skCase.getContentById(contentId); + System.out.println("### getting displayable children for " + content.getClass().getSimpleName() + + " with ID " + contentId); + List originalChildren = content.getChildren(); + // TODO so much filtering + + // First, advance past anything we don't display (volume systems, file systems, root folders) + List treeChildren = new ArrayList<>(); + for (Content child : originalChildren) { + treeChildren.addAll(FileSystemColumnUtils.getDisplayableContentForTableAndTree(child)); // TODO known, slack + } + + // Filter out the . and .. 
directories + for (Iterator iter = treeChildren.listIterator(); iter.hasNext(); ) { + Content c = iter.next(); + if ((c instanceof AbstractFile) && ContentUtils.isDotDirectory((AbstractFile)c)) { + iter.remove(); + } + } + + // Filter out any files without children + for (Iterator iter = treeChildren.listIterator(); iter.hasNext(); ) { + Content c = iter.next(); + if (c instanceof AbstractFile && (! hasDisplayableContentChildren((AbstractFile)c))) { + iter.remove(); + } + } + + // sort? maybe sort earlier... + return treeChildren; + } + + private static boolean hasDisplayableContentChildren(AbstractFile file) { + if (file != null) { + try { + if (!file.hasChildren()) { + return false; + } + } catch (TskCoreException ex) { + + //logger.log(Level.SEVERE, "Error checking if the node has children, for content: " + c, ex); //NON-NLS + return false; + } + + String query = "SELECT COUNT(obj_id) AS count FROM " + + " ( SELECT obj_id FROM tsk_objects WHERE par_obj_id = " + file.getId() + " AND type = " + + TskData.ObjectType.ARTIFACT.getObjectType() + + " INTERSECT SELECT artifact_obj_id FROM blackboard_artifacts WHERE obj_id = " + file.getId() + + " AND (artifact_type_id = " + BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID() + + " OR artifact_type_id = " + BlackboardArtifact.ARTIFACT_TYPE.TSK_MESSAGE.getTypeID() + ") " + + " UNION SELECT obj_id FROM tsk_objects WHERE par_obj_id = " + file.getId() + + " AND type = " + TskData.ObjectType.ABSTRACTFILE.getObjectType() + ") AS OBJECT_IDS"; //NON-NLS; + + try (SleuthkitCase.CaseDbQuery dbQuery = Case.getCurrentCaseThrows().getSleuthkitCase().executeQuery(query)) { + ResultSet resultSet = dbQuery.getResultSet(); + if (resultSet.next()) { + return (0 < resultSet.getInt("count")); + } + } catch (TskCoreException | SQLException | NoCurrentCaseException ex) { + //logger.log(Level.SEVERE, "Error checking if the node has children, for content: " + c, ex); //NON-NLS + } + } + return false; + } } diff --git 
a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java index 19df7cfcd2..315fd2c225 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java @@ -21,16 +21,23 @@ package org.sleuthkit.autopsy.mainui.datamodel; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import java.beans.PropertyChangeEvent; +import java.sql.ResultSet; +import java.sql.SQLException; import java.util.ArrayList; import java.util.Comparator; +import java.util.Iterator; import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; +import java.util.logging.Level; import java.util.stream.Collectors; import java.util.stream.Stream; +import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; +import org.sleuthkit.autopsy.datamodel.ContentUtils; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.autopsy.mainui.datamodel.FileSystemRowDTO.DirectoryRowDTO; import org.sleuthkit.autopsy.mainui.datamodel.FileSystemRowDTO.ImageRowDTO; @@ -44,7 +51,9 @@ import org.sleuthkit.autopsy.mainui.datamodel.FileSystemRowDTO.PoolRowDTO; import static org.sleuthkit.autopsy.mainui.datamodel.ViewsDAO.getExtensionMediaType; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.DataSource; import org.sleuthkit.datamodel.Directory; import org.sleuthkit.datamodel.Host; import org.sleuthkit.datamodel.Image; @@ -56,6 +65,7 @@ import org.sleuthkit.datamodel.Pool; import org.sleuthkit.datamodel.SlackFile; import org.sleuthkit.datamodel.SleuthkitCase; 
import org.sleuthkit.datamodel.TskCoreException; +import org.sleuthkit.datamodel.TskDataException; import org.sleuthkit.datamodel.TskData; import org.sleuthkit.datamodel.VirtualDirectory; import org.sleuthkit.datamodel.Volume; @@ -69,7 +79,7 @@ public class FileSystemDAO { private static final long CACHE_DURATION = 2; private static final TimeUnit CACHE_DURATION_UNITS = TimeUnit.MINUTES; private final Cache, BaseSearchResultsDTO> searchParamsCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).expireAfterAccess(CACHE_DURATION, CACHE_DURATION_UNITS).build(); - + private static final String FILE_SYSTEM_TYPE_ID = "FILE_SYSTEM"; private static FileSystemDAO instance = null; @@ -116,7 +126,7 @@ public class FileSystemDAO { parentName = parentContent.getName(); for (Content content : parentContent.getChildren()) { - contentForTable.addAll(FileSystemColumnUtils.getNextDisplayableContent(content)); + contentForTable.addAll(FileSystemColumnUtils.getDisplayableContentForTable(content)); } return fetchContentForTable(cacheKey, contentForTable, parentName); @@ -230,7 +240,7 @@ public class FileSystemDAO { rows.add(new FileRowDTO( file, file.getId(), - file.getName(), + FileSystemColumnUtils.convertDotDirName(file), file.getNameExtension(), getExtensionMediaType(file.getNameExtension()), file.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.ALLOC), @@ -290,6 +300,101 @@ public class FileSystemDAO { return searchParamsCache.get(searchParams, () -> fetchHostsForTable(searchParams)); } + + public TreeResultsDTO getDataSourcesForHost(Host host) throws ExecutionException { + try { + List> treeItemRows = new ArrayList<>(); + for (DataSource ds : Case.getCurrentCaseThrows().getSleuthkitCase().getHostManager().getDataSourcesForHost(host)) { + treeItemRows.add(new TreeResultsDTO.TreeItemDTO<>( + ds.getClass().getSimpleName(), + new FileSystemContentSearchParam(ds.getId()), + ds, + ds.getName(), + null + )); + // TODO sort + } + + return new TreeResultsDTO<>(treeItemRows); + } catch 
(NoCurrentCaseException | TskCoreException ex) { + throw new ExecutionException("An error occurred while fetching images for host with ID " + host.getHostId(), ex); + } + } + + public TreeResultsDTO getSingleDataSource(long dataSourceObjId) throws ExecutionException { + try { + List> treeItemRows = new ArrayList<>(); + DataSource ds = Case.getCurrentCaseThrows().getSleuthkitCase().getDataSource(dataSourceObjId); + treeItemRows.add(new TreeResultsDTO.TreeItemDTO<>( + ds.getClass().getSimpleName(), + new FileSystemContentSearchParam(ds.getId()), + ds, + ds.getName(), + null + )); + + return new TreeResultsDTO<>(treeItemRows); + } catch (NoCurrentCaseException | TskCoreException | TskDataException ex) { + throw new ExecutionException("An error occurred while fetching data source with ID " + dataSourceObjId, ex); + } + } + + /** + * TODO + * + * @param contentId Object ID of parent content. + * + * @return + * + * @throws ExecutionException + */ + public TreeResultsDTO getDisplayableContentChildren(Long contentId) throws ExecutionException { + try { + + List treeChildren = FileSystemColumnUtils.getVisibleTreeNodeChildren(contentId); + + List> treeItemRows = new ArrayList<>(); + for (Content child : treeChildren) { + Long countForNode = null; + if ((child instanceof AbstractFile) + && ! 
(child instanceof LocalFilesDataSource)) { + countForNode = new Long(child.getChildrenCount()); // TODO probably not correct + } + // public TreeItemDTO(String typeId, T typeData, Object id, String displayName, Long count) { + System.out.println("### Creating TreeItemDTO for " + child.getClass().getSimpleName() + + " child with name: " + child.getName() + + " and ID: " + child.getId()); + treeItemRows.add(new TreeResultsDTO.TreeItemDTO<>( + child.getClass().getSimpleName(), + new FileSystemContentSearchParam(child.getId()), + child, + child.getName(), + countForNode + )); + // TODO sort + } + + // get row dto's sorted by display name + //Map typeCounts = getCounts(BlackboardArtifact.Category.DATA_ARTIFACT, dataSourceId); + //List> treeItemRows = typeCounts.entrySet().stream() + // .map(entry -> { + // return new TreeResultsDTO.TreeItemDTO<>( + // BlackboardArtifact.Category.DATA_ARTIFACT.name(), + // new DataArtifactSearchParam(entry.getKey(), dataSourceId), + // entry.getKey().getTypeID(), + // entry.getKey().getDisplayName(), + // entry.getValue()); + // }) + // .sorted(Comparator.comparing(countRow -> countRow.getDisplayName())) + // .collect(Collectors.toList()); + + // return results + return new TreeResultsDTO<>(treeItemRows); + + } catch (NoCurrentCaseException | TskCoreException ex) { + throw new ExecutionException("An error occurred while fetching data artifact counts.", ex); + } + } /** * Handles fetching and paging of data for file types by mime type. diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java index 2d42b4464e..33755d2c1b 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java @@ -19,6 +19,7 @@ package org.sleuthkit.autopsy.mainui.datamodel; import java.util.List; +import java.util.Optional; /** * A list of items to display in the tree. 
@@ -54,6 +55,7 @@ public class TreeResultsDTO { private final Long count; private final T typeData; private final Object id; + //private final Optional contentId; /** * Main constructor. @@ -76,6 +78,19 @@ public class TreeResultsDTO { this.typeData = typeData; } + //public TreeItemDTO(String typeId, T typeData, Object id, String displayName, Long count, Long contentId) { + // this.typeId = typeId; + // this.id = id; + // this.displayName = displayName; + // this.count = count; + // this.typeData = typeData; + // this.contentId = Optional.ofNullable(contentId); + //} + + //public Optional getContentId() { + // return contentId; + //} + /** * @return The display name of this row. */ @@ -114,7 +129,5 @@ public class TreeResultsDTO { public String getTypeId() { return typeId; } - - } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/mainui/nodes/Bundle.properties-MERGED index 64865cab30..53573ae05c 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/Bundle.properties-MERGED @@ -1,4 +1,5 @@ AnalysisResultTypeFactory_adHocName=Adhoc Results +FileSystemFactory.UnsupportedTreeNode.displayName=Unsupported Content ImageNode_ExtractUnallocAction_text=Extract Unallocated Space to Single Files SearchResultRootNode_createSheet_childCount_displayName=Child Count SearchResultRootNode_createSheet_childCount_name=Child Count diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DirectoryNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DirectoryNode.java index 03be438aa3..30287913de 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DirectoryNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DirectoryNode.java @@ -52,9 +52,9 @@ public class DirectoryNode extends BaseNode { private void setIcon() { // set name, display name, and icon if 
(getRowDTO().getContent().isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC)) { - this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/folder-icon-deleted.png"); //NON-NLS + this.setIconBaseWithExtension(NodeIconUtil.DELETED_FOLDER.getPath()); //NON-NLS } else { - this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/Folder-icon.png"); //NON-NLS + this.setIconBaseWithExtension(NodeIconUtil.FOLDER.getPath()); //NON-NLS } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileNode.java index 292a86a381..0758b06404 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileNode.java @@ -207,17 +207,17 @@ public class FileNode extends AbstractNode implements ActionContext { LayoutFile lf = ((LayoutFileRowDTO) fileData).getLayoutFile(); switch (lf.getType()) { case CARVED: - setIconBaseWithExtension("org/sleuthkit/autopsy/images/carved-file-x-icon-16.png"); + setIconBaseWithExtension(NodeIconUtil.CARVED_FILE.getPath()); break; case LAYOUT_FILE: if (lf.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC)) { - setIconBaseWithExtension("org/sleuthkit/autopsy/images/file-icon-deleted.png"); + setIconBaseWithExtension(NodeIconUtil.DELETED_FILE.getPath()); } else { setIconBaseWithExtension(getIconForFileType(layoutFileRow.getExtensionMediaType())); } break; default: - setIconBaseWithExtension("org/sleuthkit/autopsy/images/file-icon-deleted.png"); + setIconBaseWithExtension(NodeIconUtil.DELETED_FILE.getPath()); } } } @@ -236,9 +236,9 @@ public class FileNode extends AbstractNode implements ActionContext { AbstractFile file = fileData.getAbstractFile(); if (file.isDirNameFlagSet(TSK_FS_NAME_FLAG_ENUM.UNALLOC)) { if (file.getType().equals(TSK_DB_FILES_TYPE_ENUM.CARVED)) { - this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/carved-file-x-icon-16.png"); //NON-NLS + 
this.setIconBaseWithExtension(NodeIconUtil.CARVED_FILE.getPath()); //NON-NLS } else { - this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/file-icon-deleted.png"); //NON-NLS + this.setIconBaseWithExtension(NodeIconUtil.DELETED_FILE.getPath()); //NON-NLS } } else { this.setIconBaseWithExtension(getIconForFileType(fileData.getExtensionMediaType())); diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileSystemFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileSystemFactory.java new file mode 100644 index 0000000000..e46d9b6f2d --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileSystemFactory.java @@ -0,0 +1,589 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.mainui.nodes; + +import java.beans.PropertyChangeEvent; +import java.util.Optional; +import org.openide.nodes.Children; +import org.openide.nodes.Node; +import java.util.concurrent.ExecutionException; +import javax.swing.Action; +import org.openide.nodes.ChildFactory; +import org.openide.util.Lookup; +import org.openide.util.NbBundle; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; +import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent; +import org.sleuthkit.autopsy.datamodel.FileTypeExtensions; +import org.sleuthkit.autopsy.datamodel.utils.IconsUtil; +import org.sleuthkit.autopsy.directorytree.ExtractUnallocAction; +import org.sleuthkit.autopsy.directorytree.FileSystemDetailsAction; +import org.sleuthkit.autopsy.ingest.IngestManager; +import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +import org.sleuthkit.autopsy.mainui.datamodel.FileSystemContentSearchParam; +import org.sleuthkit.autopsy.mainui.datamodel.FileSystemColumnUtils; +import org.sleuthkit.autopsy.mainui.datamodel.MainDAO; +import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO; +import org.sleuthkit.autopsy.mainui.datamodel.TreeContentItemDTO; +import static org.sleuthkit.autopsy.mainui.nodes.NodeIconUtil.CARVED_FILE; +import static org.sleuthkit.autopsy.mainui.nodes.NodeIconUtil.DELETED_FILE; +import static org.sleuthkit.autopsy.mainui.nodes.NodeIconUtil.DELETED_FOLDER; +import static org.sleuthkit.autopsy.mainui.nodes.NodeIconUtil.FILE; +import static org.sleuthkit.autopsy.mainui.nodes.NodeIconUtil.FOLDER; +import static org.sleuthkit.autopsy.mainui.nodes.TreeNode.getDefaultLookup; +import org.sleuthkit.autopsy.mainui.nodes.actions.ActionContext; +import org.sleuthkit.autopsy.mainui.nodes.actions.ActionsFactory; +import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.DataSource; +import org.sleuthkit.datamodel.Host; +import org.sleuthkit.datamodel.Image; +import 
org.sleuthkit.datamodel.LocalFilesDataSource; +import org.sleuthkit.datamodel.VirtualDirectory; +import org.sleuthkit.datamodel.Volume; +import org.sleuthkit.datamodel.Pool; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.TskCoreException; +import org.sleuthkit.datamodel.TskDataException; +import org.sleuthkit.datamodel.TskData; + +/** + * Factory for displaying content in the data source section of the tree. + */ +public class FileSystemFactory extends TreeChildFactory { + + private Long contentId = null; + private Host host = null; + + /** + * Main constructor. + * + * @param contentId The object ID for this node + */ + public FileSystemFactory(Long contentId) { + System.out.println("### Creating FileSystemFactory with content ID: " + contentId); + this.contentId = contentId; + } + + public FileSystemFactory(Host host) { + System.out.println("### Creating FileSystemFactory with host ID: " + host.getHostId()); + this.host = host; + } + + @Override + protected TreeResultsDTO getChildResults() throws IllegalArgumentException, ExecutionException { + if (host == null) { + TreeResultsDTO results = MainDAO.getInstance().getFileSystemDAO().getDisplayableContentChildren(contentId); + System.out.println("### getChildResults() for id: " + contentId + " has " + results.getItems().size() + " rows"); + return results; + } else { + TreeResultsDTO results = MainDAO.getInstance().getFileSystemDAO().getDataSourcesForHost(host); + System.out.println("### getChildResults() for host: " + host.getName() + " has " + results.getItems().size() + " rows"); + return results; + } + } + + @Override + protected TreeNode createNewNode(TreeResultsDTO.TreeItemDTO rowData) { + try { + Content content = Case.getCurrentCaseThrows().getSleuthkitCase().getContentById(rowData.getTypeData().getContentObjectId()); + if (content instanceof Image) { + return new ImageTreeNode((Image) content, rowData); + } else if (content 
instanceof Volume) { + return new VolumeTreeNode((Volume) content, rowData); + } else if (content instanceof Pool) { + return new PoolTreeNode((Pool) content, rowData); + } else if (content instanceof VirtualDirectory) { + return new VirtualDirectoryTreeNode((VirtualDirectory) content, rowData); + } else if (content instanceof LocalFilesDataSource) { + return new LocalFilesDataSourceTreeNode((LocalFilesDataSource) content, rowData); + } else if (content instanceof Volume) { + return new VolumeTreeNode((Volume) content, rowData); + } else if (content instanceof AbstractFile) { + AbstractFile file = (AbstractFile) content; + if (file.isDir()) { + return new DirectoryTreeNode(file, rowData); + } else { + return new FileTreeNode(file, rowData); + } + } else { + return new UnsupportedTreeNode(content, rowData); + } + } catch (NoCurrentCaseException | TskCoreException ex) { + // TODO log + return null; + } + } + + @Override + public boolean isRefreshRequired(PropertyChangeEvent evt) { + String eventType = evt.getPropertyName(); + if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) { + /** + * This is a stop gap measure until a different way of handling the + * closing of cases is worked out. Currently, remote events may be + * received for a case that is already closed. + */ + try { + Case.getCurrentCaseThrows(); + /** + * Due to some unresolved issues with how cases are closed, it + * is possible for the event to have a null oldValue if the + * event is a remote event. + */ + // TODO + final ModuleDataEvent event = (ModuleDataEvent) evt.getOldValue(); + //if (null != event && Category.DATA_ARTIFACT.equals(event.getBlackboardArtifactType().getCategory()) + // && !(DataArtifactDAO.getIgnoredTreeTypes().contains(event.getBlackboardArtifactType()))) { + return true; + //} + } catch (NoCurrentCaseException notUsed) { + /** + * Case is closed, do nothing. 
+ */ + } + } + return false; + } + + /** + * This factory is used to produce the single data source node under "Data Source Files" when + * grouping by person/host is selected. + */ + public static class DataSourceFactory extends TreeChildFactory { + private final long dataSourceId; + + public DataSourceFactory(long dataSourceId) { + System.out.println("### Creating DataSourceFactory with dataSourceId: " + dataSourceId); + this.dataSourceId = dataSourceId; + } + + @Override + protected TreeResultsDTO getChildResults() throws IllegalArgumentException, ExecutionException { + // We're not really getting children here, just creating a node for the data source itself. + return MainDAO.getInstance().getFileSystemDAO().getSingleDataSource(dataSourceId); + } + + @Override + protected TreeNode createNewNode(TreeResultsDTO.TreeItemDTO rowData) { + try { + DataSource ds = Case.getCurrentCaseThrows().getSleuthkitCase().getDataSource(dataSourceId); + if (ds instanceof Image) { + return new ImageTreeNode((Image) ds, rowData); + } else if (ds instanceof LocalFilesDataSource) { + return new LocalFilesDataSourceTreeNode((LocalFilesDataSource) ds, rowData); + } else { + // There shouldn't be any other type + // TODO log + return null; + } + } catch (NoCurrentCaseException | TskCoreException | TskDataException ex) { + // TODO log + return null; + } + } + + + @Override + public boolean isRefreshRequired(PropertyChangeEvent evt) { + // TODO + return false; + } + + } + + /** + * Display name and count of a file system node in the tree. 
+ */ + public abstract static class FileSystemTreeNode extends TreeNode implements ActionContext { + + + protected FileSystemTreeNode(String nodeName, String icon, TreeResultsDTO.TreeItemDTO itemData, Children children, Lookup lookup) { + //super(nodeName, icon, itemData, children, lookup); + super(nodeName, "org/sleuthkit/autopsy/images/bank.png", itemData, children, lookup); + } + + protected static Children createChildrenForContent(Long contentId) { + try { + if (FileSystemColumnUtils.getVisibleTreeNodeChildren(contentId).isEmpty()) { + return Children.LEAF; + } else { + return Children.create(new FileSystemFactory(contentId), true); + } + } catch (TskCoreException | NoCurrentCaseException ex) { + // TODO log + return Children.LEAF; + } + } + + @Override + public void respondSelection(DataResultTopComponent dataResultPanel) { + dataResultPanel.displayFileSystemContent(this.getItemData().getTypeData()); + } + + public abstract Node clone(); + + @Override + public Action[] getActions(boolean context) { + return ActionsFactory.getActions(this); + } + } + + static class ImageTreeNode extends FileSystemTreeNode { + Image image; + + ImageTreeNode(Image image, TreeResultsDTO.TreeItemDTO itemData) { + super(itemData.getDisplayName(), + NodeIconUtil.IMAGE.getPath(), + itemData, + createChildrenForContent(itemData.getTypeData().getContentObjectId()), + getDefaultLookup(itemData)); + this.image = image; + System.out.println("### ImageTreeNode - name: " + itemData.getDisplayName() + ", contentId: " + itemData.getTypeData().getContentObjectId()); + } + + public Node clone() { + return new ImageTreeNode(image, getItemData()); + } + + @Override + public Optional getNodeSpecificActions() { + ActionsFactory.ActionGroup group = new ActionsFactory.ActionGroup(); + group.add(new ExtractUnallocAction( + Bundle.ImageNode_ExtractUnallocAction_text(), image)); + return Optional.of(group); + } + + @Override + public Optional getDataSourceForActions() { + return Optional.of(image); + } + 
+ @Override + public Optional getNewWindowActionNode() { + return Optional.of(this); + } + + @Override + public boolean supportsSourceContentViewerActions() { + return true; + } + } + + static class VolumeTreeNode extends FileSystemTreeNode { + Volume volume; + + VolumeTreeNode(Volume volume, TreeResultsDTO.TreeItemDTO itemData) { + super(itemData.getDisplayName(), + NodeIconUtil.VOLUME.getPath(), + itemData, + createChildrenForContent(itemData.getTypeData().getContentObjectId()), + getDefaultLookup(itemData)); + this.volume = volume; + System.out.println("### VolumeTreeNode - name: " + itemData.getDisplayName() + ", contentId: " + itemData.getTypeData().getContentObjectId()); + } + + public Node clone() { + return new VolumeTreeNode(volume, getItemData()); + } + + @Override + public Optional getNodeSpecificActions() { + ActionsFactory.ActionGroup group = new ActionsFactory.ActionGroup(); + group.add(new ExtractUnallocAction( + Bundle.VolumnNode_ExtractUnallocAction_text(), volume)); + group.add(new FileSystemDetailsAction(volume)); + return Optional.of(group); + } + + @Override + public Optional getNewWindowActionNode() { + return Optional.of(this); + } + + @Override + public boolean supportsSourceContentViewerActions() { + return true; + } + } + + static class PoolTreeNode extends FileSystemTreeNode { + Pool pool; + + PoolTreeNode(Pool pool, TreeResultsDTO.TreeItemDTO itemData) { + super(itemData.getDisplayName(), + NodeIconUtil.VOLUME.getPath(), + itemData, + createChildrenForContent(itemData.getTypeData().getContentObjectId()), + getDefaultLookup(itemData)); + this.pool = pool; + System.out.println("### PoolTreeNode - name: " + itemData.getDisplayName() + ", contentId: " + itemData.getTypeData().getContentObjectId()); + } + + public Node clone() { + return new PoolTreeNode(pool, getItemData()); + } + } + + static class DirectoryTreeNode extends FileSystemTreeNode { + AbstractFile dir; + + DirectoryTreeNode(AbstractFile dir, TreeResultsDTO.TreeItemDTO itemData) 
{ + super(itemData.getDisplayName(), + getDirectoryIcon(dir), + itemData, + createChildrenForContent(itemData.getTypeData().getContentObjectId()), + getDefaultLookup(itemData)); + this.dir = dir; + System.out.println("### DirectoryTreeNode - name: " + itemData.getDisplayName() + ", contentId: " + itemData.getTypeData().getContentObjectId()); + } + + private static String getDirectoryIcon(AbstractFile dir) { + if (dir.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC)) { + return DELETED_FOLDER.getPath(); + } else { + return FOLDER.getPath(); + } + } + + public Node clone() { + return new DirectoryTreeNode(dir, getItemData()); + } + + @Override + public boolean supportsViewInTimeline() { + return true; + } + + @Override + public Optional getFileForViewInTimelineAction() { + return Optional.of(dir); + } + + @Override + public boolean supportsExtractActions() { + return true; + } + + @Override + public Optional getContentForRunIngestionModuleAction() { + return Optional.of(dir); + } + + @Override + public boolean supportsContentTagAction() { + return true; + } + } + + static abstract class SpecialDirectoryTreeNode extends FileSystemTreeNode { + AbstractFile dir; + + protected SpecialDirectoryTreeNode(AbstractFile dir, String nodeName, String icon, TreeResultsDTO.TreeItemDTO itemData, Children children, Lookup lookup) { + super(nodeName, icon, itemData, children, lookup); + this.dir = dir; + } + + @Override + public boolean supportsSourceContentViewerActions() { + return true; + } + + @Override + public Optional getNewWindowActionNode() { + return Optional.of(this); + } + + @Override + public boolean supportsExtractActions() { + return true; + } + + @Override + public Optional getContentForRunIngestionModuleAction() { + return Optional.of(dir); + } + + @Override + public Optional getContentForFileSearchAction() { + return Optional.of(dir); + } + } + + static class LocalDirectoryTreeNode extends SpecialDirectoryTreeNode { + LocalDirectoryTreeNode(AbstractFile dir, 
TreeResultsDTO.TreeItemDTO itemData) { + super(dir, + itemData.getDisplayName(), + NodeIconUtil.FOLDER.getPath(), + itemData, + createChildrenForContent(itemData.getTypeData().getContentObjectId()), + getDefaultLookup(itemData)); + System.out.println("### LocalDirectoryTreeNode - name: " + itemData.getDisplayName() + ", contentId: " + itemData.getTypeData().getContentObjectId()); + } + + public Node clone() { + return new DirectoryTreeNode(dir, getItemData()); + } + } + + static class LocalFilesDataSourceTreeNode extends SpecialDirectoryTreeNode { + + LocalFilesDataSourceTreeNode(AbstractFile localFilesDataSource, TreeResultsDTO.TreeItemDTO itemData) { + super(localFilesDataSource, + itemData.getDisplayName(), + NodeIconUtil.VOLUME.getPath(), + itemData, + createChildrenForContent(itemData.getTypeData().getContentObjectId()), + getDefaultLookup(itemData)); + System.out.println("### LocalFilesDataSourceTreeNode - name: " + itemData.getDisplayName() + ", contentId: " + itemData.getTypeData().getContentObjectId()); + } + + public Node clone() { + return new LocalFilesDataSourceTreeNode(dir, getItemData()); + } + + @Override + public Optional getDataSourceForActions() { + return Optional.of(dir); + } + } + + static class VirtualDirectoryTreeNode extends SpecialDirectoryTreeNode { + + VirtualDirectoryTreeNode(AbstractFile dir, TreeResultsDTO.TreeItemDTO itemData) { + super(dir, + itemData.getDisplayName(), + NodeIconUtil.VIRTUAL_DIRECTORY.getPath(), + itemData, + createChildrenForContent(itemData.getTypeData().getContentObjectId()), + getDefaultLookup(itemData)); + System.out.println("### VirtualDirectoryTreeNode - name: " + itemData.getDisplayName() + ", contentId: " + itemData.getTypeData().getContentObjectId()); + } + + public Node clone() { + return new DirectoryTreeNode(dir, getItemData()); + } + } + + static class FileTreeNode extends FileSystemTreeNode { + AbstractFile file; + + FileTreeNode(AbstractFile file, TreeResultsDTO.TreeItemDTO itemData) { + 
super(itemData.getDisplayName(), + getFileIcon(file), + itemData, + createChildrenForContent(itemData.getTypeData().getContentObjectId()), + getDefaultLookup(itemData)); + this.file = file; + System.out.println("### FileTreeNode - name: " + itemData.getDisplayName() + ", contentId: " + itemData.getTypeData().getContentObjectId()); + } + + public Node clone() { + return new DirectoryTreeNode(file, getItemData()); + } + + private static String getFileIcon(AbstractFile file) { + if (file.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC)) { + if (file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.CARVED)) { + return CARVED_FILE.getPath(); + } else { + return DELETED_FILE.getPath(); + } + } else { + return FILE.getPath(); + } + } + + @Override + public boolean supportsViewInTimeline() { + return true; + } + + @Override + public Optional getFileForViewInTimelineAction() { + return Optional.of(file); + } + + @Override + public boolean supportsSourceContentViewerActions() { + return true; + } + + @Override + public Optional getNewWindowActionNode() { + return Optional.of(this); + } + + @Override + public Optional getExternalViewerActionNode() { + return Optional.of(this); + } + + @Override + public boolean supportsExtractActions() { + return true; + } + + @Override + public boolean supportsContentTagAction() { + return true; + } + + @Override + public Optional getFileForDirectoryBrowseMode() { + // TODO What is this? + return Optional.of(file); + } + + @Override + public Optional getExtractArchiveWithPasswordActionFile() { + // GVDTODO: HANDLE THIS ACTION IN A BETTER WAY!----- + // See JIRA-8099 + boolean isArchive = FileTypeExtensions.getArchiveExtensions().contains("." + file.getNameExtension().toLowerCase()); + boolean encryptionDetected = false; + try { + encryptionDetected = isArchive && file.getArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED).size() > 0; + } catch (TskCoreException ex) { + // TODO + } + + return encryptionDetected ? 
Optional.of(file) : Optional.empty(); + } + } + + @NbBundle.Messages({ + "FileSystemFactory.UnsupportedTreeNode.displayName=Unsupported Content",}) + static class UnsupportedTreeNode extends FileSystemTreeNode { + Content content; + + UnsupportedTreeNode(Content content, TreeResultsDTO.TreeItemDTO itemData) { + super(Bundle.FileSystemFactory_UnsupportedTreeNode_displayName(), + NodeIconUtil.FILE.getPath(), + itemData, + createChildrenForContent(itemData.getTypeData().getContentObjectId()), + getDefaultLookup(itemData)); + this.content = content; + System.out.println("### UnsupportedTreeNode - name: " + itemData.getDisplayName() + ", contentId: " + itemData.getTypeData().getContentObjectId()); + } + + public Node clone() { + return new UnsupportedTreeNode(content, getItemData()); + } + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/ImageNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/ImageNode.java index e509593c6a..03e781a794 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/ImageNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/ImageNode.java @@ -43,7 +43,7 @@ public class ImageNode extends BaseNode { super(Children.LEAF, ContentNodeUtil.getLookup(row.getContent()), results, row); setDisplayName(row.getContent().getName()); setShortDescription(row.getContent().getName()); - setIconBaseWithExtension("org/sleuthkit/autopsy/images/hard-drive-icon.jpg"); //NON-NLS + setIconBaseWithExtension(NodeIconUtil.IMAGE.getPath()); //NON-NLS } @NbBundle.Messages({ diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/NodeIconUtil.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/NodeIconUtil.java new file mode 100644 index 0000000000..fc7931da93 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/NodeIconUtil.java @@ -0,0 +1,68 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. 
+ */ +package org.sleuthkit.autopsy.mainui.nodes; + +import org.sleuthkit.datamodel.*; // TODO + +/** + * Consolidates node paths shared between the result view table and the tree. + */ +class NodeIconUtil { + + final static NodeIconUtil FOLDER = new NodeIconUtil("org/sleuthkit/autopsy/images/Folder-icon.png"); + final static NodeIconUtil DELETED_FOLDER = new NodeIconUtil("org/sleuthkit/autopsy/images/folder-icon-deleted.png"); + final static NodeIconUtil VIRTUAL_DIRECTORY = new NodeIconUtil("org/sleuthkit/autopsy/images/folder-icon-virtual.png"); + final static NodeIconUtil CARVED_FILE = new NodeIconUtil("org/sleuthkit/autopsy/images/carved-file-x-icon-16.png"); + final static NodeIconUtil DELETED_FILE = new NodeIconUtil("org/sleuthkit/autopsy/images/file-icon-deleted.png"); + final static NodeIconUtil IMAGE = new NodeIconUtil("org/sleuthkit/autopsy/images/hard-drive-icon.jpg"); + final static NodeIconUtil VOLUME = new NodeIconUtil("org/sleuthkit/autopsy/images/vol-icon.png"); + final static NodeIconUtil POOL = new NodeIconUtil("org/sleuthkit/autopsy/images/pool-icon.png"); + final static NodeIconUtil FILE = new NodeIconUtil("org/sleuthkit/autopsy/images/file-icon.png"); + final static NodeIconUtil LOCAL_FILES_DATA_SOURCE = new NodeIconUtil("org/sleuthkit/autopsy/images/fileset-icon-16.png"); + //final static NodeIconUtil = new NodeIconUtil(""); + + private final String iconPath; + + private NodeIconUtil(String path) { + this.iconPath = path; + } + + String getPath() { + return iconPath; + } + + public static String getPathForContent(Content c) { + if (c instanceof Image) { + return IMAGE.getPath(); + } else if (c instanceof LocalFilesDataSource) { + return LOCAL_FILES_DATA_SOURCE.getPath(); + } else if (c instanceof Volume) { + return VOLUME.getPath(); + } else if (c instanceof Pool) { + return POOL.getPath(); + } else if (c instanceof AbstractFile) { + AbstractFile file = (AbstractFile) c; + if (((AbstractFile) c).isDir()) { + if 
(file.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC)) { + return DELETED_FOLDER.getPath(); + } else { + return FOLDER.getPath(); + } + } else { + if (file.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC)) { + if (file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.CARVED)) { + return CARVED_FILE.getPath(); + } else { + return DELETED_FILE.getPath(); + } + } else { + return FILE.getPath(); + } + } + } + return FILE.getPath(); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/PoolNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/PoolNode.java index a59a829dce..8454a7b8d1 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/PoolNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/PoolNode.java @@ -43,6 +43,6 @@ public class PoolNode extends BaseNode { String name = row.getContent().getType().getName(); setDisplayName(name); setShortDescription(name); - setIconBaseWithExtension("org/sleuthkit/autopsy/images/pool-icon.png"); + setIconBaseWithExtension(NodeIconUtil.POOL.getPath()); } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/VolumeNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/VolumeNode.java index 9efc1796c5..bf3425e896 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/VolumeNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/VolumeNode.java @@ -42,7 +42,7 @@ public class VolumeNode extends BaseNode { */ public VolumeNode(SearchResultsDTO results, VolumeRowDTO row) { super(Children.LEAF, ContentNodeUtil.getLookup(row.getContent()), results, row); - setIconBaseWithExtension("org/sleuthkit/autopsy/images/vol-icon.png"); //NON-NLS + setIconBaseWithExtension(NodeIconUtil.VOLUME.getPath()); //NON-NLS // use first cell value for display name String displayName = row.getCellValues().size() > 0 From 2155f0b6d21f4349b71c037d05ca6e24a43f914b Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Wed, 17 Nov 2021 20:36:47 -0500 Subject: [PATCH 046/142] fixes --- 
Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java | 3 ++- Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java index 72a5011673..7267b61c54 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java @@ -47,7 +47,8 @@ public class MainDAO extends AbstractDAO { private static final Set INGEST_MODULE_EVENTS = EnumSet.of( IngestManager.IngestModuleEvent.CONTENT_CHANGED, - IngestManager.IngestModuleEvent.DATA_ADDED + IngestManager.IngestModuleEvent.DATA_ADDED, + IngestManager.IngestModuleEvent.FILE_DONE ); private static final Set QUEUED_CASE_EVENTS = ImmutableSet.of( diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java index 354e30c4cf..5f325a8b2e 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java @@ -705,7 +705,7 @@ public class ViewsDAO extends AbstractDAO { } } - if (fileExtensionDsMap.isEmpty() || mimeTypeDsMap.isEmpty() || fileSizeDsMap.isEmpty()) { + if (fileExtensionDsMap.isEmpty() && mimeTypeDsMap.isEmpty() && fileSizeDsMap.isEmpty()) { return Collections.emptyList(); } From 50506baa4a970e42581e07f899af34ac5c3299ac Mon Sep 17 00:00:00 2001 From: apriestman Date: Thu, 18 Nov 2021 14:28:22 -0500 Subject: [PATCH 047/142] Refactoring node icons. Working on actions. 
--- .../autopsy/mainui/datamodel/FileRowDTO.java | 5 +- .../mainui/datamodel/FileSystemDAO.java | 2 +- .../mainui/datamodel/MediaTypeUtils.java | 102 ++++++++++++++++++ .../mainui/datamodel/TreeResultsDTO.java | 14 --- .../autopsy/mainui/datamodel/ViewsDAO.java | 33 +----- .../mainui/nodes/Bundle.properties-MERGED | 2 + .../autopsy/mainui/nodes/FileNode.java | 46 +------- .../mainui/nodes/FileSystemFactory.java | 46 +++++--- 8 files changed, 142 insertions(+), 108 deletions(-) create mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/MediaTypeUtils.java diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileRowDTO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileRowDTO.java index 4ef8cfb935..ddf98f538f 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileRowDTO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileRowDTO.java @@ -19,6 +19,7 @@ package org.sleuthkit.autopsy.mainui.datamodel; import java.util.List; +import org.sleuthkit.autopsy.mainui.datamodel.MediaTypeUtils.ExtensionMediaType; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.LayoutFile; import org.sleuthkit.datamodel.SlackFile; @@ -29,10 +30,6 @@ import org.sleuthkit.datamodel.TskData; */ public class FileRowDTO extends BaseRowDTO { - public enum ExtensionMediaType { - IMAGE, VIDEO, AUDIO, DOC, EXECUTABLE, TEXT, WEB, PDF, ARCHIVE, UNCATEGORIZED - } - private static String TYPE_ID = "FILE"; public static String getTypeIdForClass() { diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java index 315fd2c225..e5236527c1 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java @@ -48,7 +48,7 @@ import org.sleuthkit.autopsy.mainui.datamodel.FileSystemRowDTO.VirtualDirectoryR import 
org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO.LayoutFileRowDTO; import org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO.SlackFileRowDTO; import org.sleuthkit.autopsy.mainui.datamodel.FileSystemRowDTO.PoolRowDTO; -import static org.sleuthkit.autopsy.mainui.datamodel.ViewsDAO.getExtensionMediaType; +import static org.sleuthkit.autopsy.mainui.datamodel.MediaTypeUtils.getExtensionMediaType; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MediaTypeUtils.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MediaTypeUtils.java new file mode 100644 index 0000000000..f6c44d2433 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MediaTypeUtils.java @@ -0,0 +1,102 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.mainui.datamodel; + +import org.apache.commons.lang3.StringUtils; +import org.sleuthkit.autopsy.datamodel.FileTypeExtensions; + +/** + * + */ +public class MediaTypeUtils { + + public enum ExtensionMediaType { + IMAGE, VIDEO, AUDIO, DOC, EXECUTABLE, TEXT, WEB, PDF, ARCHIVE, UNCATEGORIZED + } + + public static ExtensionMediaType getExtensionMediaType(String ext) { + if (StringUtils.isBlank(ext)) { + return ExtensionMediaType.UNCATEGORIZED; + } else { + ext = "." + ext; + } + if (FileTypeExtensions.getImageExtensions().contains(ext)) { + return ExtensionMediaType.IMAGE; + } else if (FileTypeExtensions.getVideoExtensions().contains(ext)) { + return ExtensionMediaType.VIDEO; + } else if (FileTypeExtensions.getAudioExtensions().contains(ext)) { + return ExtensionMediaType.AUDIO; + } else if (FileTypeExtensions.getDocumentExtensions().contains(ext)) { + return ExtensionMediaType.DOC; + } else if (FileTypeExtensions.getExecutableExtensions().contains(ext)) { + return ExtensionMediaType.EXECUTABLE; + } else if (FileTypeExtensions.getTextExtensions().contains(ext)) { + return ExtensionMediaType.TEXT; + } else if (FileTypeExtensions.getWebExtensions().contains(ext)) { + return ExtensionMediaType.WEB; + } else if (FileTypeExtensions.getPDFExtensions().contains(ext)) { + return ExtensionMediaType.PDF; + } else if (FileTypeExtensions.getArchiveExtensions().contains(ext)) { + return ExtensionMediaType.ARCHIVE; + } else { + return ExtensionMediaType.UNCATEGORIZED; + } + } + + /** + * Gets the path to the icon file that should be used to visually represent + * an AbstractFile, using the file name extension to select the icon. + * + * @param file An AbstractFile. + * + * @return An icon file path. 
+ */ + public static String getIconForFileType(ExtensionMediaType fileType) { + if (fileType == null) { + return "org/sleuthkit/autopsy/images/file-icon.png"; + } + + switch (fileType) { + case IMAGE: + return "org/sleuthkit/autopsy/images/image-file.png"; + case VIDEO: + return "org/sleuthkit/autopsy/images/video-file.png"; + case AUDIO: + return "org/sleuthkit/autopsy/images/audio-file.png"; + case DOC: + return "org/sleuthkit/autopsy/images/doc-file.png"; + case EXECUTABLE: + return "org/sleuthkit/autopsy/images/exe-file.png"; + case TEXT: + return "org/sleuthkit/autopsy/images/text-file.png"; + case WEB: + return "org/sleuthkit/autopsy/images/web-file.png"; + case PDF: + return "org/sleuthkit/autopsy/images/pdf-file.png"; + case ARCHIVE: + return "org/sleuthkit/autopsy/images/archive-file.png"; + default: + case UNCATEGORIZED: + return "org/sleuthkit/autopsy/images/file-icon.png"; + } + } + + private MediaTypeUtils() { + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java index 33755d2c1b..923ed0390a 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java @@ -55,7 +55,6 @@ public class TreeResultsDTO { private final Long count; private final T typeData; private final Object id; - //private final Optional contentId; /** * Main constructor. @@ -77,19 +76,6 @@ public class TreeResultsDTO { this.count = count; this.typeData = typeData; } - - //public TreeItemDTO(String typeId, T typeData, Object id, String displayName, Long count, Long contentId) { - // this.typeId = typeId; - // this.id = id; - // this.displayName = displayName; - // this.count = count; - // this.typeData = typeData; - // this.contentId = Optional.ofNullable(contentId); - //} - - //public Optional getContentId() { - // return contentId; - //} /** * @return The display name of this row. 
diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java index 6f88d47289..14dfacdff5 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java @@ -41,7 +41,7 @@ import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import static org.sleuthkit.autopsy.core.UserPreferences.hideKnownFilesInViewsTree; import static org.sleuthkit.autopsy.core.UserPreferences.hideSlackFilesInViewsTree; import org.sleuthkit.autopsy.datamodel.FileTypeExtensions; -import org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO.ExtensionMediaType; +import org.sleuthkit.autopsy.mainui.datamodel.MediaTypeUtils.ExtensionMediaType; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.AbstractFile; @@ -76,35 +76,6 @@ public class ViewsDAO { return instance; } - static ExtensionMediaType getExtensionMediaType(String ext) { - if (StringUtils.isBlank(ext)) { - return ExtensionMediaType.UNCATEGORIZED; - } else { - ext = "." 
+ ext; - } - if (FileTypeExtensions.getImageExtensions().contains(ext)) { - return ExtensionMediaType.IMAGE; - } else if (FileTypeExtensions.getVideoExtensions().contains(ext)) { - return ExtensionMediaType.VIDEO; - } else if (FileTypeExtensions.getAudioExtensions().contains(ext)) { - return ExtensionMediaType.AUDIO; - } else if (FileTypeExtensions.getDocumentExtensions().contains(ext)) { - return ExtensionMediaType.DOC; - } else if (FileTypeExtensions.getExecutableExtensions().contains(ext)) { - return ExtensionMediaType.EXECUTABLE; - } else if (FileTypeExtensions.getTextExtensions().contains(ext)) { - return ExtensionMediaType.TEXT; - } else if (FileTypeExtensions.getWebExtensions().contains(ext)) { - return ExtensionMediaType.WEB; - } else if (FileTypeExtensions.getPDFExtensions().contains(ext)) { - return ExtensionMediaType.PDF; - } else if (FileTypeExtensions.getArchiveExtensions().contains(ext)) { - return ExtensionMediaType.ARCHIVE; - } else { - return ExtensionMediaType.UNCATEGORIZED; - } - } - private SleuthkitCase getCase() throws NoCurrentCaseException { return Case.getCurrentCaseThrows().getSleuthkitCase(); } @@ -633,7 +604,7 @@ public class ViewsDAO { file.getId(), file.getName(), file.getNameExtension(), - getExtensionMediaType(file.getNameExtension()), + MediaTypeUtils.getExtensionMediaType(file.getNameExtension()), file.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.ALLOC), file.getType(), cellValues)); diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/mainui/nodes/Bundle.properties-MERGED index 53573ae05c..76636c3af4 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/Bundle.properties-MERGED @@ -1,4 +1,6 @@ AnalysisResultTypeFactory_adHocName=Adhoc Results +FileSystemFactory.FileSystemTreeNode.ExtractUnallocAction.text=Extract Unallocated Space to Single Files 
+FileSystemFactory.FileSystemTreeNode.OpenFileSearchByAttr.text=Open File Search by Attributes FileSystemFactory.UnsupportedTreeNode.displayName=Unsupported Content ImageNode_ExtractUnallocAction_text=Extract Unallocated Space to Single Files SearchResultRootNode_createSheet_childCount_displayName=Child Count diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileNode.java index 0758b06404..c508c5e0df 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileNode.java @@ -29,7 +29,7 @@ import org.sleuthkit.autopsy.datamodel.FileTypeExtensions; import org.sleuthkit.autopsy.mainui.datamodel.SearchResultsDTO; import org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO; import org.sleuthkit.autopsy.mainui.datamodel.ColumnKey; -import org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO.ExtensionMediaType; +import org.sleuthkit.autopsy.mainui.datamodel.MediaTypeUtils; import org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO.LayoutFileRowDTO; import org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO.SlackFileRowDTO; import org.sleuthkit.autopsy.mainui.nodes.actions.ActionContext; @@ -47,44 +47,6 @@ import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM; */ public class FileNode extends AbstractNode implements ActionContext { - /** - * Gets the path to the icon file that should be used to visually represent - * an AbstractFile, using the file name extension to select the icon. - * - * @param file An AbstractFile. - * - * @return An icon file path. 
- */ - static String getIconForFileType(ExtensionMediaType fileType) { - if (fileType == null) { - return "org/sleuthkit/autopsy/images/file-icon.png"; - } - - switch (fileType) { - case IMAGE: - return "org/sleuthkit/autopsy/images/image-file.png"; - case VIDEO: - return "org/sleuthkit/autopsy/images/video-file.png"; - case AUDIO: - return "org/sleuthkit/autopsy/images/audio-file.png"; - case DOC: - return "org/sleuthkit/autopsy/images/doc-file.png"; - case EXECUTABLE: - return "org/sleuthkit/autopsy/images/exe-file.png"; - case TEXT: - return "org/sleuthkit/autopsy/images/text-file.png"; - case WEB: - return "org/sleuthkit/autopsy/images/web-file.png"; - case PDF: - return "org/sleuthkit/autopsy/images/pdf-file.png"; - case ARCHIVE: - return "org/sleuthkit/autopsy/images/archive-file.png"; - default: - case UNCATEGORIZED: - return "org/sleuthkit/autopsy/images/file-icon.png"; - } - } - private final boolean directoryBrowseMode; private final FileRowDTO fileData; private final List columns; @@ -116,7 +78,7 @@ public class FileNode extends AbstractNode implements ActionContext { this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/file-icon-deleted.png"); //NON-NLS } } else { - this.setIconBaseWithExtension(getIconForFileType(fileData.getExtensionMediaType())); + this.setIconBaseWithExtension(MediaTypeUtils.getIconForFileType(fileData.getExtensionMediaType())); } } @@ -213,7 +175,7 @@ public class FileNode extends AbstractNode implements ActionContext { if (lf.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC)) { setIconBaseWithExtension(NodeIconUtil.DELETED_FILE.getPath()); } else { - setIconBaseWithExtension(getIconForFileType(layoutFileRow.getExtensionMediaType())); + setIconBaseWithExtension(MediaTypeUtils.getIconForFileType(layoutFileRow.getExtensionMediaType())); } break; default: @@ -241,7 +203,7 @@ public class FileNode extends AbstractNode implements ActionContext { this.setIconBaseWithExtension(NodeIconUtil.DELETED_FILE.getPath()); //NON-NLS } 
} else { - this.setIconBaseWithExtension(getIconForFileType(fileData.getExtensionMediaType())); + this.setIconBaseWithExtension(MediaTypeUtils.getIconForFileType(fileData.getExtensionMediaType())); } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileSystemFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileSystemFactory.java index e46d9b6f2d..38cf8ef412 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileSystemFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileSystemFactory.java @@ -33,14 +33,15 @@ import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent; import org.sleuthkit.autopsy.datamodel.FileTypeExtensions; import org.sleuthkit.autopsy.datamodel.utils.IconsUtil; import org.sleuthkit.autopsy.directorytree.ExtractUnallocAction; +import org.sleuthkit.autopsy.directorytree.FileSearchAction; import org.sleuthkit.autopsy.directorytree.FileSystemDetailsAction; import org.sleuthkit.autopsy.ingest.IngestManager; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.autopsy.mainui.datamodel.FileSystemContentSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.FileSystemColumnUtils; +import org.sleuthkit.autopsy.mainui.datamodel.MediaTypeUtils; import org.sleuthkit.autopsy.mainui.datamodel.MainDAO; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO; -import org.sleuthkit.autopsy.mainui.datamodel.TreeContentItemDTO; import static org.sleuthkit.autopsy.mainui.nodes.NodeIconUtil.CARVED_FILE; import static org.sleuthkit.autopsy.mainui.nodes.NodeIconUtil.DELETED_FILE; import static org.sleuthkit.autopsy.mainui.nodes.NodeIconUtil.DELETED_FOLDER; @@ -53,6 +54,7 @@ import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.DataSource; import org.sleuthkit.datamodel.Host; import org.sleuthkit.datamodel.Image; +import org.sleuthkit.datamodel.LocalDirectory; import org.sleuthkit.datamodel.LocalFilesDataSource; import org.sleuthkit.datamodel.VirtualDirectory; import 
org.sleuthkit.datamodel.Volume; @@ -108,11 +110,13 @@ public class FileSystemFactory extends TreeChildFactory implements ActionContext { protected FileSystemTreeNode(String nodeName, String icon, TreeResultsDTO.TreeItemDTO itemData, Children children, Lookup lookup) { - //super(nodeName, icon, itemData, children, lookup); - super(nodeName, "org/sleuthkit/autopsy/images/bank.png", itemData, children, lookup); + super(nodeName, icon, itemData, children, lookup); + //super(nodeName, "org/sleuthkit/autopsy/images/bank.png", itemData, children, lookup); } protected static Children createChildrenForContent(Long contentId) { @@ -253,7 +260,7 @@ public class FileSystemFactory extends TreeChildFactory getNodeSpecificActions() { ActionsFactory.ActionGroup group = new ActionsFactory.ActionGroup(); group.add(new ExtractUnallocAction( - Bundle.ImageNode_ExtractUnallocAction_text(), image)); + Bundle.FileSystemFactory_FileSystemTreeNode_ExtractUnallocAction_text(), image)); + group.add(new FileSearchAction(Bundle.FileSystemFactory_FileSystemTreeNode_OpenFileSearchByAttr_text(), image.getId())); return Optional.of(group); } @@ -294,7 +302,7 @@ public class FileSystemFactory extends TreeChildFactory Date: Thu, 18 Nov 2021 20:14:40 -0500 Subject: [PATCH 048/142] fixes --- .../mainui/datamodel/FileSystemDAO.java | 66 ++++++++++++++++--- .../datamodel/events/DAOEventUtils.java | 38 ++++++++--- .../events/FileSystemContentEvent.java | 7 ++ 3 files changed, 93 insertions(+), 18 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java index c4214c3f70..70f24743a7 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java @@ -65,6 +65,7 @@ import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.DataSource; import 
org.sleuthkit.datamodel.Directory; +import org.sleuthkit.datamodel.FileSystem; import org.sleuthkit.datamodel.Host; import org.sleuthkit.datamodel.Image; import org.sleuthkit.datamodel.LayoutFile; @@ -78,6 +79,7 @@ import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; import org.sleuthkit.datamodel.VirtualDirectory; import org.sleuthkit.datamodel.Volume; +import org.sleuthkit.datamodel.VolumeSystem; /** * @@ -123,7 +125,9 @@ public class FileSystemDAO extends AbstractDAO { return false; } - return key.getContentObjectId() == ((FileSystemContentEvent) daoEvent).getContentObjectId(); + FileSystemContentEvent contentEvt = (FileSystemContentEvent) daoEvent; + + return contentEvt.getContentObjectId() == null || key.getContentObjectId().equals(contentEvt.getContentObjectId()); } public boolean isSystemHostInvalidating(FileSystemHostSearchParam key, DAOEvent daoEvent) { @@ -342,22 +346,56 @@ public class FileSystemDAO extends AbstractDAO { } } + /** + * In instances where parents are hidden, refresh the entire tree. + * + * @param parentContent The parent content. + * + * @return True if full tree should be refreshed. 
+ */ + private boolean invalidatesAllFileSystem(Content parentContent) { + if (parentContent instanceof VolumeSystem || parentContent instanceof FileSystem) { + return true; + } + + if (parentContent instanceof Directory) { + Directory dir = (Directory) parentContent; + return dir.isRoot() && !dir.getName().equals(".") && !dir.getName().equals(".."); + } + + if (parentContent instanceof LocalDirectory) { + return ((LocalDirectory) parentContent).isRoot(); + } + + return false; + } + @Override List handleAutopsyEvent(Collection evts) { Set affectedPersons = new HashSet<>(); Set affectedHosts = new HashSet<>(); Set affectedParentContent = new HashSet<>(); + boolean refreshAllContent = false; for (PropertyChangeEvent evt : evts) { - Content content = DAOEventUtils.getContentFromEvt(evt); - if (content instanceof AbstractContent) { + Content content = DAOEventUtils.getDerivedContentFromEvt(evt); + if (content != null) { + Content parentContent; try { - Optional parentId = ((AbstractContent) content).getParentId(); - if (parentId.isPresent()) { - affectedParentContent.add(parentId.get()); - } + parentContent = content.getParent(); } catch (TskCoreException ex) { - logger.log(Level.WARNING, "An exception occurred getting the parent id of content: " + content.getId(), ex); + logger.log(Level.WARNING, "Unable to get parent content of content with id: " + content.getId(), ex); + continue; + } + + if (parentContent == null) { + continue; + } + + if (invalidatesAllFileSystem(parentContent)) { + refreshAllContent = true; + } else { + affectedParentContent.add(parentContent.getId()); } } else if (evt instanceof DataSourceAddedEvent) { Long hostId = getHostFromDs(((DataSourceAddedEvent) evt).getDataSource()); @@ -382,6 +420,8 @@ public class FileSystemDAO extends AbstractDAO { } } + final boolean triggerFullRefresh = refreshAllContent; + // GVDTODO handling null ids versus the 'No Persons' option ConcurrentMap, BaseSearchResultsDTO> concurrentMap = 
this.searchParamsCache.asMap(); concurrentMap.forEach((k, v) -> { @@ -398,16 +438,22 @@ public class FileSystemDAO extends AbstractDAO { } } else if (searchParams instanceof FileSystemContentSearchParam) { FileSystemContentSearchParam contentParams = (FileSystemContentSearchParam) searchParams; - if (affectedParentContent.contains(contentParams)) { + if (triggerFullRefresh + || contentParams.getContentObjectId() == null + || affectedParentContent.contains(contentParams.getContentObjectId())) { concurrentMap.remove(k); } } }); + Stream fileEvts = triggerFullRefresh + ? Stream.of(new FileSystemContentEvent(null)) + : affectedParentContent.stream().map(id -> new FileSystemContentEvent(id)); + return Stream.of( affectedPersons.stream().map(id -> new FileSystemPersonEvent(id)), affectedHosts.stream().map(id -> new FileSystemHostEvent(id)), - affectedParentContent.stream().map(id -> new FileSystemContentEvent(id)) + fileEvts ) .flatMap(s -> s) .collect(Collectors.toList()); diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventUtils.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventUtils.java index c8f6fa5370..3c1d81cc85 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventUtils.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventUtils.java @@ -32,8 +32,8 @@ import org.sleuthkit.datamodel.Content; public class DAOEventUtils { /** - * Returns the content from the ModuleContentEvent. If the event does not - * contain a ModuleContentEvent or the event does not contain Content, null + * Returns the content from the event. If the event does not + * contain a event or the event does not contain Content, null * is returned. 
* * @param evt The event @@ -42,12 +42,9 @@ public class DAOEventUtils { */ public static Content getContentFromEvt(PropertyChangeEvent evt) { String eventName = evt.getPropertyName(); - if (IngestManager.IngestModuleEvent.CONTENT_CHANGED.toString().equals(eventName) - && (evt.getOldValue() instanceof ModuleContentEvent) - && ((ModuleContentEvent) evt.getOldValue()).getSource() instanceof Content) { - - return (Content) ((ModuleContentEvent) evt.getOldValue()).getSource(); - + Content derivedContent = getDerivedContentFromEvt(evt); + if (derivedContent != null) { + return derivedContent; } else if (IngestManager.IngestModuleEvent.FILE_DONE.toString().equals(eventName) && (evt.getNewValue() instanceof Content)) { return (Content) evt.getNewValue(); @@ -55,7 +52,32 @@ public class DAOEventUtils { return null; } } + + /** + * Returns the content from the ModuleContentEvent. If the event does not + * contain a event or the event does not contain Content, null + * is returned. + * @param evt The event + * @return The inner content or null if no content. + */ + public static Content getDerivedContentFromEvt(PropertyChangeEvent evt) { + String eventName = evt.getPropertyName(); + if (IngestManager.IngestModuleEvent.CONTENT_CHANGED.toString().equals(eventName) + && (evt.getOldValue() instanceof ModuleContentEvent) + && ((ModuleContentEvent) evt.getOldValue()).getSource() instanceof Content) { + return (Content) ((ModuleContentEvent) evt.getOldValue()).getSource(); + + } else { + return null; + } + } + + /** + * Returns a file in the event if a file is found in the event. + * @param evt The autopsy event. + * @return The inner file or null if no file found. 
+ */ public static AbstractFile getFileFromEvt(PropertyChangeEvent evt) { Content content = getContentFromEvt(evt); return (content instanceof AbstractFile) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemContentEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemContentEvent.java index 1cb519bd16..1474c95f04 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemContentEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemContentEvent.java @@ -24,8 +24,15 @@ import java.util.Objects; * An event that affects the given parent content. */ public class FileSystemContentEvent implements DAOEvent { + private final Long contentObjectId; + /** + * Main constructor. + * + * @param contentObjectId The parent content object id. If null, performs + * full refresh of file tree. + */ public FileSystemContentEvent(Long contentObjectId) { this.contentObjectId = contentObjectId; } From cde539fbbe685b262875d1450bf57fe412b20a8d Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Fri, 19 Nov 2021 09:37:54 -0500 Subject: [PATCH 049/142] integrate comms accounts --- .../mainui/datamodel/AnalysisResultDAO.java | 33 ++-- .../mainui/datamodel/CommAccountsDAO.java | 154 +++++++++++------- .../mainui/datamodel/DataArtifactDAO.java | 62 +++---- .../mainui/datamodel/FileSystemDAO.java | 22 +-- .../autopsy/mainui/datamodel/MainDAO.java | 3 +- .../mainui/datamodel/OsAccountsDAO.java | 19 ++- .../autopsy/mainui/datamodel/TagsDAO.java | 11 +- .../autopsy/mainui/datamodel/ViewsDAO.java | 35 ++-- .../datamodel/events/CommAccountsEvent.java | 78 +++++++++ 9 files changed, 270 insertions(+), 147 deletions(-) create mode 100755 Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/CommAccountsEvent.java diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java index 7709ac26fb..db38727282 100644 
--- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java @@ -822,8 +822,6 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { */ public static class AnalysisResultFetcher extends DAOFetcher { - private final AnalysisResultDAO dao; - /** * Main constructor. * @@ -831,17 +829,20 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { */ public AnalysisResultFetcher(AnalysisResultSearchParam params) { super(params); - this.dao = MainDAO.getInstance().getAnalysisResultDAO(); + } + + protected AnalysisResultDAO getDAO() { + return MainDAO.getInstance().getAnalysisResultDAO(); } @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return dao.getAnalysisResultsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + return getDAO().getAnalysisResultsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); } @Override public boolean isRefreshRequired(DAOEvent evt) { - return dao.isAnalysisResultsInvalidating(this.getParameters(), evt); + return getDAO().isAnalysisResultsInvalidating(this.getParameters(), evt); } } @@ -850,8 +851,6 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { */ public static class AnalysisResultSetFetcher extends DAOFetcher { - private final AnalysisResultDAO dao; - /** * Main constructor. 
* @@ -859,17 +858,20 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { */ public AnalysisResultSetFetcher(AnalysisResultSetSearchParam params) { super(params); - this.dao = MainDAO.getInstance().getAnalysisResultDAO(); + } + + protected AnalysisResultDAO getDAO() { + return MainDAO.getInstance().getAnalysisResultDAO(); } @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return dao.getAnalysisResultSetHits(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + return getDAO().getAnalysisResultSetHits(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); } @Override public boolean isRefreshRequired(DAOEvent evt) { - return dao.isAnalysisResultsSetInvalidating(this.getParameters(), evt); + return getDAO().isAnalysisResultsSetInvalidating(this.getParameters(), evt); } } @@ -878,8 +880,6 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { */ public static class KeywordHitResultFetcher extends DAOFetcher { - private final AnalysisResultDAO dao; - /** * Main constructor. 
* @@ -887,18 +887,21 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { */ public KeywordHitResultFetcher(KeywordHitSearchParam params) { super(params); - this.dao = MainDAO.getInstance().getAnalysisResultDAO(); + } + + protected AnalysisResultDAO getDAO() { + return MainDAO.getInstance().getAnalysisResultDAO(); } @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return dao.getKeywordHitsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + return getDAO().getKeywordHitsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); } @Override public boolean isRefreshRequired(DAOEvent evt) { // GVDTODO - throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. + return true; } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java index 720ff4ece6..8a7cede394 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java @@ -22,18 +22,28 @@ import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import java.beans.PropertyChangeEvent; import java.sql.SQLException; +import java.util.Collection; import java.util.Comparator; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; +import java.util.logging.Level; import java.util.stream.Collectors; import java.util.stream.Stream; import org.openide.util.NbBundle; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; import 
org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; +import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.IngestManager; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.CommAccountsEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.Account; import org.sleuthkit.datamodel.Blackboard; @@ -43,15 +53,16 @@ import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; /** - * Provides information to populate the results viewer for data in the + * Provides information to populate the results viewer for data in the * Communication Accounts section. */ @Messages({"CommAccountsDAO.fileColumns.noDescription=No Description"}) -public class CommAccountsDAO { +public class CommAccountsDAO extends AbstractDAO { + private static final Logger logger = Logger.getLogger(CommAccountsDAO.class.getName()); private static final int CACHE_SIZE = Account.Type.PREDEFINED_ACCOUNT_TYPES.size(); // number of cached SearchParams sub-types private static final long CACHE_DURATION = 2; - private static final TimeUnit CACHE_DURATION_UNITS = TimeUnit.MINUTES; + private static final TimeUnit CACHE_DURATION_UNITS = TimeUnit.MINUTES; private final Cache, SearchResultsDTO> searchParamsCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).expireAfterAccess(CACHE_DURATION, CACHE_DURATION_UNITS).build(); private static CommAccountsDAO instance = null; @@ -70,14 +81,14 @@ public class CommAccountsDAO { } else if (key.getDataSourceId() != null && key.getDataSourceId() <= 0) { throw new IllegalArgumentException("Data source id must be greater than 0 or null"); } - + SearchParams searchParams = new SearchParams<>(key, startItem, maxCount); if (hardRefresh) { this.searchParamsCache.invalidate(searchParams); } return searchParamsCache.get(searchParams, () -> 
fetchCommAccountsDTOs(searchParams)); - } + } /** * Returns a list of paged artifacts. @@ -103,10 +114,10 @@ public class CommAccountsDAO { Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard(); Long dataSourceId = cacheKey.getParamData().getDataSourceId(); BlackboardArtifact.Type artType = BlackboardArtifact.Type.TSK_ACCOUNT; - - if ( (cacheKey.getStartItem() == 0) // offset is zero AND - && ( (cacheKey.getMaxResultsCount() != null && currentPageSize < cacheKey.getMaxResultsCount()) // number of results is less than max - || (cacheKey.getMaxResultsCount() == null)) ) { // OR max number of results was not specified + + if ((cacheKey.getStartItem() == 0) // offset is zero AND + && ((cacheKey.getMaxResultsCount() != null && currentPageSize < cacheKey.getMaxResultsCount()) // number of results is less than max + || (cacheKey.getMaxResultsCount() == null))) { // OR max number of results was not specified return currentPageSize; } else { if (dataSourceId != null) { @@ -114,9 +125,9 @@ public class CommAccountsDAO { } else { return blackboard.getArtifactsCount(artType.getTypeID()); } - } + } } - + @NbBundle.Messages({"CommAccounts.name.text=Communication Accounts"}) private SearchResultsDTO fetchCommAccountsDTOs(SearchParams cacheKey) throws NoCurrentCaseException, TskCoreException, SQLException { @@ -125,22 +136,78 @@ public class CommAccountsDAO { Blackboard blackboard = skCase.getBlackboard(); Account.Type type = cacheKey.getParamData().getType(); Long dataSourceId = cacheKey.getParamData().getDataSourceId(); - List allArtifacts = blackboard.getArtifacts(BlackboardArtifact.Type.TSK_ACCOUNT, + List allArtifacts = blackboard.getArtifacts(BlackboardArtifact.Type.TSK_ACCOUNT, BlackboardAttribute.Type.TSK_ACCOUNT_TYPE, type.getTypeName(), dataSourceId, - false); // GVDTODO handle approved/rejected account actions - + false); // GVDTODO handle approved/rejected account actions + // get current page of artifacts List pagedArtifacts = 
getPaged(allArtifacts, cacheKey); - + // Populate the attributes for paged artifacts in the list. This is done using one database call as an efficient way to - // load many artifacts/attributes at once. + // load many artifacts/attributes at once. blackboard.loadBlackboardAttributes(pagedArtifacts); - + DataArtifactDAO dataArtDAO = MainDAO.getInstance().getDataArtifactsDAO(); BlackboardArtifactDAO.TableData tableData = dataArtDAO.createTableData(BlackboardArtifact.Type.TSK_ACCOUNT, pagedArtifacts); return new DataArtifactTableSearchResultsDTO(BlackboardArtifact.Type.TSK_ACCOUNT, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), allArtifacts.size()); } - + + @Override + void clearCaches() { + this.searchParamsCache.invalidateAll();; + } + + @Override + List handleAutopsyEvent(Collection evts) { + // maps account type to the data sources affected + Map> commAccountsAffected = new HashMap<>(); + try { + for (PropertyChangeEvent evt : evts) { + String eventType = evt.getPropertyName(); + if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) { + ModuleDataEvent eventData = (ModuleDataEvent) evt.getOldValue(); + if (null != eventData + && eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID()) { + + // check that the update is for the same account type + for (BlackboardArtifact artifact : eventData.getArtifacts()) { + BlackboardAttribute typeAttr = artifact.getAttribute(BlackboardAttribute.Type.TSK_ACCOUNT_TYPE); + commAccountsAffected.computeIfAbsent(typeAttr.getValueString(), (k) -> new HashSet<>()) + .add(artifact.getDataSourceObjectID()); + } + } + } + } + } catch (TskCoreException ex) { + logger.log(Level.WARNING, "Unable to properly handle module data event.", ex); + } + + // invalidate cache entries that are affected by events + ConcurrentMap, SearchResultsDTO> concurrentMap = this.searchParamsCache.asMap(); + concurrentMap.forEach((k, v) -> { + Object objectKey = k.getParamData(); + 
if (objectKey instanceof CommAccountsSearchParams) { + CommAccountsSearchParams commAcctKey = (CommAccountsSearchParams) objectKey; + Set dsIdsAffected = commAccountsAffected.get(commAcctKey.getType().getTypeName()); + if (dsIdsAffected != null + && (commAcctKey.getDataSourceId() == null + || dsIdsAffected.contains(commAcctKey.getDataSourceId()))) { + + concurrentMap.remove(k); + } + } + }); + + return commAccountsAffected.entrySet().stream() + .flatMap(entry -> entry.getValue().stream().map(dsId -> new CommAccountsEvent(entry.getKey(), dsId))) + .collect(Collectors.toList()); + } + + private boolean isCommAcctInvalidating(CommAccountsSearchParams parameters, DAOEvent evt) { + throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. + + } + /** * Handles fetching and paging of data for communication accounts. */ @@ -155,53 +222,18 @@ public class CommAccountsDAO { super(params); } - @Override - public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return MainDAO.getInstance().getCommAccountsDAO().getCommAcounts(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + protected CommAccountsDAO getDAO() { + return MainDAO.getInstance().getCommAccountsDAO(); } @Override - public boolean isRefreshRequired(PropertyChangeEvent evt) { - CommAccountsSearchParams params = this.getParameters(); - String eventType = evt.getPropertyName(); + public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { + return getDAO().getCommAcounts(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + } - if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) { - /** - * Checking for a current case is a stop gap measure until a - * different way of handling the closing of cases is worked out. 
- * Currently, remote events may be received for a case that is - * already closed. - */ - try { - Case.getCurrentCaseThrows(); - /** - * Even with the check above, it is still possible that the - * case will be closed in a different thread before this - * code executes. If that happens, it is possible for the - * event to have a null oldValue. - */ - ModuleDataEvent eventData = (ModuleDataEvent) evt.getOldValue(); - if (null != eventData - && eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID()) { - - // check that the update is for the same account type - for (BlackboardArtifact artifact : eventData.getArtifacts()) { - for (BlackboardAttribute atribute : artifact.getAttributes()) { - if (atribute.getAttributeType() == BlackboardAttribute.Type.TSK_ACCOUNT_TYPE) { - if (atribute.getValueString().equals(params.getType().toString())) { - return true; - } - } - } - } - } - } catch (NoCurrentCaseException notUsed) { - // Case is closed, do nothing. - } catch (TskCoreException ex) { - // There is nothing we can do with the exception. - } - } - return false; + @Override + public boolean isRefreshRequired(DAOEvent evt) { + return getDAO().isCommAcctInvalidating(this.getParameters(), evt); } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index 34edc5d408..45b47025d4 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -53,16 +53,16 @@ import org.sleuthkit.datamodel.TskCoreException; * DAO for providing data about data artifacts to populate the results viewer. 
*/ public class DataArtifactDAO extends BlackboardArtifactDAO { - + private static Logger logger = Logger.getLogger(DataArtifactDAO.class.getName()); - + private static DataArtifactDAO instance = null; - + synchronized static DataArtifactDAO getInstance() { if (instance == null) { instance = new DataArtifactDAO(); } - + return instance; } @@ -72,27 +72,27 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { public static Set getIgnoredTreeTypes() { return BlackboardArtifactDAO.getIgnoredTreeTypes(); } - + private final Cache, DataArtifactTableSearchResultsDTO> dataArtifactCache = CacheBuilder.newBuilder().maximumSize(1000).build(); - + private DataArtifactTableSearchResultsDTO fetchDataArtifactsForTable(SearchParams cacheKey) throws NoCurrentCaseException, TskCoreException { - + SleuthkitCase skCase = getCase(); Blackboard blackboard = skCase.getBlackboard(); BlackboardArtifact.Type artType = cacheKey.getParamData().getArtifactType(); - + String pagedWhereClause = getWhereClause(cacheKey); - + List arts = new ArrayList<>(); arts.addAll(blackboard.getDataArtifactsWhere(pagedWhereClause)); blackboard.loadBlackboardAttributes(arts); - + long totalResultsCount = getTotalResultsCount(cacheKey, arts.size()); - + TableData tableData = createTableData(artType, arts); return new DataArtifactTableSearchResultsDTO(artType, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), totalResultsCount); } - + @Override RowDTO createRow(BlackboardArtifact artifact, Content srcContent, Content linkedFile, boolean isTimelineSupported, List cellValues, long id) throws IllegalArgumentException { if (!(artifact instanceof DataArtifact)) { @@ -100,25 +100,25 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { } return new DataArtifactRowDTO((DataArtifact) artifact, srcContent, linkedFile, isTimelineSupported, cellValues, id); } - + public DataArtifactTableSearchResultsDTO getDataArtifactsForTable(DataArtifactSearchParam artifactKey, long startItem, Long 
maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { BlackboardArtifact.Type artType = artifactKey.getArtifactType(); - + if (artType == null || artType.getCategory() != BlackboardArtifact.Category.DATA_ARTIFACT || (artifactKey.getDataSourceId() != null && artifactKey.getDataSourceId() < 0)) { throw new IllegalArgumentException(MessageFormat.format("Illegal data. " + "Artifact type must be non-null and data artifact. Data source id must be null or > 0. " + "Received artifact type: {0}; data source id: {1}", artType, artifactKey.getDataSourceId() == null ? "" : artifactKey.getDataSourceId())); } - + SearchParams searchParams = new SearchParams<>(artifactKey, startItem, maxCount); if (hardRefresh) { this.dataArtifactCache.invalidate(searchParams); } - + return dataArtifactCache.get(searchParams, () -> fetchDataArtifactsForTable(searchParams)); } - + public boolean isDataArtifactInvalidating(DataArtifactSearchParam key, DAOEvent eventData) { if (!(eventData instanceof DataArtifactEvent)) { return false; @@ -128,7 +128,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { && (key.getDataSourceId() == null || (key.getDataSourceId() == dataArtEvt.getDataSourceId())); } } - + public void dropDataArtifactCache() { dataArtifactCache.invalidateAll(); } @@ -163,17 +163,17 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { // return results return new TreeResultsDTO<>(treeItemRows); - + } catch (NoCurrentCaseException | TskCoreException ex) { throw new ExecutionException("An error occurred while fetching data artifact counts.", ex); } } - + @Override void clearCaches() { this.dataArtifactCache.invalidateAll(); } - + @Override List handleAutopsyEvent(Collection evts) { // get a grouping of artifacts mapping the artifact type id to data source id. 
@@ -193,7 +193,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { logger.log(Level.WARNING, "Unable to fetch artifact category for artifact with id: " + art.getId(), ex); } }); - + // don't do anything else if no relevant events if (artifactTypeDataSourceMap.isEmpty()) { return Collections.emptyList(); @@ -219,7 +219,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { toRet.add(new DataArtifactEvent(artTypeId, dsObjId)); } } - + return toRet; } @@ -227,8 +227,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { * Handles fetching and paging of data artifacts. */ public static class DataArtifactFetcher extends DAOFetcher { - private final DataArtifactDAO dao; - + /** * Main constructor. * @@ -236,17 +235,20 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { */ public DataArtifactFetcher(DataArtifactSearchParam params) { super(params); - this.dao = MainDAO.getInstance().getDataArtifactsDAO(); } - + + protected DataArtifactDAO getDAO() { + return MainDAO.getInstance().getDataArtifactsDAO(); + } + @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return dao.getDataArtifactsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + return getDAO().getDataArtifactsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); } - + @Override public boolean isRefreshRequired(DAOEvent evt) { - return dao.isDataArtifactInvalidating(this.getParameters(), evt); + return getDAO().isDataArtifactInvalidating(this.getParameters(), evt); } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java index b488d5fc06..7550893779 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java @@ -463,8 +463,6 @@ public class 
FileSystemDAO extends AbstractDAO { */ public static class FileSystemFetcher extends DAOFetcher { - private final FileSystemDAO dao; - /** * Main constructor. * @@ -472,24 +470,25 @@ public class FileSystemDAO extends AbstractDAO { */ public FileSystemFetcher(FileSystemContentSearchParam params) { super(params); - this.dao = MainDAO.getInstance().getFileSystemDAO(); + } + + protected FileSystemDAO getDAO() { + return MainDAO.getInstance().getFileSystemDAO(); } @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return this.dao.getContentForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + return getDAO().getContentForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); } @Override public boolean isRefreshRequired(DAOEvent evt) { - return this.dao.isSystemContentInvalidating(this.getParameters(), evt); + return getDAO().isSystemContentInvalidating(this.getParameters(), evt); } } public static class FileSystemHostFetcher extends DAOFetcher { - private final FileSystemDAO dao; - /** * Main constructor. 
* @@ -497,17 +496,20 @@ public class FileSystemDAO extends AbstractDAO { */ public FileSystemHostFetcher(FileSystemHostSearchParam params) { super(params); - this.dao = MainDAO.getInstance().getFileSystemDAO(); + } + + protected FileSystemDAO getDAO() { + return MainDAO.getInstance().getFileSystemDAO(); } @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return this.dao.getContentForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + return getDAO().getContentForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); } @Override public boolean isRefreshRequired(DAOEvent evt) { - return this.dao.isSystemHostInvalidating(this.getParameters(), evt); + return getDAO().isSystemHostInvalidating(this.getParameters(), evt); } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java index b09e87bea2..55d7fa1a22 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java @@ -118,7 +118,8 @@ public class MainDAO extends AbstractDAO { viewsDAO, fileSystemDAO, tagsDAO, - accountsDAO); + osAccountsDAO, + commAccountsDAO); public DataArtifactDAO getDataArtifactsDAO() { return dataArtifactDAO; diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java index 8f886d2cee..9b9c7961f2 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java @@ -86,7 +86,7 @@ public class OsAccountsDAO extends AbstractDAO { Case.Events.OS_ACCOUNTS_UPDATED.toString(), Case.Events.OS_ACCT_INSTANCES_ADDED.toString() ); - + private static OsAccountsDAO instance = null; synchronized static OsAccountsDAO getInstance() { @@ -115,7 +115,7 
@@ public class OsAccountsDAO extends AbstractDAO { return searchParamsCache.get(searchParams, () -> fetchAccountsDTOs(searchParams)); } - + public boolean isOSAccountInvalidatingEvt(OsAccountsSearchParams searchParams, DAOEvent evt) { return evt instanceof OsAccountEvent; } @@ -193,11 +193,11 @@ public class OsAccountsDAO extends AbstractDAO { .map(evt -> new OsAccountEvent()) .limit(1) .collect(Collectors.toList()); - + if (!daoEvts.isEmpty()) { this.searchParamsCache.invalidateAll(); } - + return daoEvts; } @@ -206,8 +206,6 @@ public class OsAccountsDAO extends AbstractDAO { */ public static class AccountFetcher extends DAOFetcher { - private final OsAccountsDAO dao; - /** * Main constructor. * @@ -215,17 +213,20 @@ public class OsAccountsDAO extends AbstractDAO { */ public AccountFetcher(OsAccountsSearchParams params) { super(params); - this.dao = MainDAO.getInstance().getOsAccountsDAO(); + } + + protected OsAccountsDAO getDAO() { + return MainDAO.getInstance().getOsAccountsDAO(); } @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return dao.getAccounts(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + return getDAO().getAccounts(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); } @Override public boolean isRefreshRequired(DAOEvent evt) { - return dao.isOSAccountInvalidatingEvt(this.getParameters(), evt); + return getDAO().isOSAccountInvalidatingEvt(this.getParameters(), evt); } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java index 43abd670e1..f4b6d0a592 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java @@ -408,8 +408,6 @@ public class TagsDAO extends AbstractDAO { */ public static class TagFetcher extends DAOFetcher { - private final TagsDAO dao; - /** * 
Main constructor. * @@ -417,17 +415,20 @@ public class TagsDAO extends AbstractDAO { */ public TagFetcher(TagsSearchParams params) { super(params); - this.dao = MainDAO.getInstance().getTagsDAO(); + } + + protected TagsDAO getDAO() { + return MainDAO.getInstance().getTagsDAO(); } @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return dao.getTags(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + return getDAO().getTags(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); } @Override public boolean isRefreshRequired(DAOEvent evt) { - return dao.isTagsInvalidatingEvent(this.getParameters(), evt); + return getDAO().isTagsInvalidatingEvent(this.getParameters(), evt); } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java index 5f325a8b2e..efe6fe6c7d 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java @@ -798,7 +798,7 @@ public class ViewsDAO extends AbstractDAO { || suffixes.values().stream().flatMap(set -> set.stream()).anyMatch(ds -> ds == mimeParams.getDataSourceId()))) { concurrentMap.remove(k); - // otherwise, see if suffix is present + // otherwise, see if suffix is present } else { Set dataSources = suffixes.get(mimePieces.getValue()); if (dataSources != null && (mimeParams.getDataSourceId() == null || dataSources.contains(mimeParams.getDataSourceId()))) { @@ -821,8 +821,6 @@ public class ViewsDAO extends AbstractDAO { */ public static class FileTypeExtFetcher extends DAOFetcher { - private final ViewsDAO dao; - /** * Main constructor. 
* @@ -830,17 +828,20 @@ public class ViewsDAO extends AbstractDAO { */ public FileTypeExtFetcher(FileTypeExtensionsSearchParams params) { super(params); - this.dao = MainDAO.getInstance().getViewsDAO(); + } + + protected ViewsDAO getDAO() { + return MainDAO.getInstance().getViewsDAO(); } @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return this.dao.getFilesByExtension(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + return getDAO().getFilesByExtension(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); } @Override public boolean isRefreshRequired(DAOEvent evt) { - return this.dao.isFilesByExtInvalidating(this.getParameters(), evt); + return getDAO().isFilesByExtInvalidating(this.getParameters(), evt); } } @@ -849,8 +850,6 @@ public class ViewsDAO extends AbstractDAO { */ public static class FileTypeMimeFetcher extends DAOFetcher { - private final ViewsDAO dao; - /** * Main constructor. 
* @@ -858,17 +857,20 @@ public class ViewsDAO extends AbstractDAO { */ public FileTypeMimeFetcher(FileTypeMimeSearchParams params) { super(params); - this.dao = MainDAO.getInstance().getViewsDAO(); + } + + protected ViewsDAO getDAO() { + return MainDAO.getInstance().getViewsDAO(); } @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return this.dao.getFilesByMime(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + return getDAO().getFilesByMime(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); } @Override public boolean isRefreshRequired(DAOEvent evt) { - return this.dao.isFilesByMimeInvalidating(this.getParameters(), evt); + return getDAO().isFilesByMimeInvalidating(this.getParameters(), evt); } } @@ -877,8 +879,6 @@ public class ViewsDAO extends AbstractDAO { */ public class FileTypeSizeFetcher extends DAOFetcher { - private final ViewsDAO dao; - /** * Main constructor. 
* @@ -886,17 +886,20 @@ public class ViewsDAO extends AbstractDAO { */ public FileTypeSizeFetcher(FileTypeSizeSearchParams params) { super(params); - this.dao = MainDAO.getInstance().getViewsDAO(); + } + + protected ViewsDAO getDAO() { + return MainDAO.getInstance().getViewsDAO(); } @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return this.dao.getFilesBySize(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + return getDAO().getFilesBySize(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); } @Override public boolean isRefreshRequired(DAOEvent evt) { - return this.dao.isFilesBySizeInvalidating(this.getParameters(), evt); + return getDAO().isFilesBySizeInvalidating(this.getParameters(), evt); } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/CommAccountsEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/CommAccountsEvent.java new file mode 100755 index 0000000000..c4c2978077 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/CommAccountsEvent.java @@ -0,0 +1,78 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +import java.util.Objects; + +/** + * An event for handling + */ +public class CommAccountsEvent implements DAOEvent { + + private final String accountType; + private final Long dataSourceId; + + public CommAccountsEvent(String accountType, Long dataSourceId) { + this.accountType = accountType; + this.dataSourceId = dataSourceId; + } + + public String getAccountType() { + return accountType; + } + + public Long getDataSourceId() { + return dataSourceId; + } + + @Override + public int hashCode() { + int hash = 3; + hash = 17 * hash + Objects.hashCode(this.accountType); + hash = 17 * hash + Objects.hashCode(this.dataSourceId); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final CommAccountsEvent other = (CommAccountsEvent) obj; + if (!Objects.equals(this.accountType, other.accountType)) { + return false; + } + if (!Objects.equals(this.dataSourceId, other.dataSourceId)) { + return false; + } + return true; + } + + @Override + public Type getType() { + return Type.RESULT; + } + +} From b9f1a803e58fa91701adc72b21c4a7532f1b23a7 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Fri, 19 Nov 2021 09:43:40 -0500 Subject: [PATCH 050/142] fix --- .../mainui/datamodel/CommAccountsDAO.java | 20 ++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java index 8a7cede394..f577aae002 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java @@ -203,9 +203,23 @@ public class CommAccountsDAO extends AbstractDAO { .collect(Collectors.toList()); } - private boolean 
isCommAcctInvalidating(CommAccountsSearchParams parameters, DAOEvent evt) { - throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. - + /** + * Returns true if the dao event could update the data stored in the + * parameters. + * + * @param parameters The parameters. + * @param evt The event. + * + * @return True if event invalidates parameters. + */ + public boolean isCommAcctInvalidating(CommAccountsSearchParams parameters, DAOEvent evt) { + if (evt instanceof CommAccountsEvent) { + CommAccountsEvent commEvt = (CommAccountsEvent) evt; + return (parameters.getType().getTypeName().equals(commEvt.getAccountType())) + && (parameters.getDataSourceId() == null || parameters.getDataSourceId() == commEvt.getDataSourceId()); + } else { + return false; + } } /** From 2a2835d4eac91d86e985d5624630d8f9cdf2f248 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Fri, 19 Nov 2021 13:45:37 -0500 Subject: [PATCH 051/142] 7895 CR data artifact ingest module --- ...poFileIngestModule.java => CentralRepoIngestModule.java} | 6 +++--- .../ingestmodule/CentralRepoIngestModuleFactory.java | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) rename Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/{CentralRepoFileIngestModule.java => CentralRepoIngestModule.java} (98%) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java similarity index 98% rename from Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java rename to Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java index 8599f2e8c8..fec61caa42 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoFileIngestModule.java +++ 
b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java @@ -48,9 +48,9 @@ import static org.sleuthkit.autopsy.centralrepository.ingestmodule.CentralRepoIn * central repository, and makes previously notable analysis results for files * marked as notable in other cases. */ -final class CentralRepoFileIngestModule implements FileIngestModule { +final class CentralRepoIngestModule implements FileIngestModule { - private static final Logger logger = Logger.getLogger(CentralRepoFileIngestModule.class.getName()); + private static final Logger logger = Logger.getLogger(CentralRepoIngestModule.class.getName()); private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter(); private final boolean flagNotableItems; private final boolean saveCorrAttrInstances; @@ -65,7 +65,7 @@ final class CentralRepoFileIngestModule implements FileIngestModule { * * @param settings The ingest job settings. */ - CentralRepoFileIngestModule(IngestSettings settings) { + CentralRepoIngestModule(IngestSettings settings) { flagNotableItems = settings.isFlagTaggedNotableItems(); saveCorrAttrInstances = settings.shouldCreateCorrelationProperties(); } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModuleFactory.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModuleFactory.java index 9c52e2a2e0..8a5e122e6c 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModuleFactory.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModuleFactory.java @@ -72,14 +72,14 @@ public class CentralRepoIngestModuleFactory extends IngestModuleFactoryAdapter { @Override public FileIngestModule createFileIngestModule(IngestModuleIngestJobSettings settings) { if (settings instanceof IngestSettings) { - return new CentralRepoFileIngestModule((IngestSettings) settings); + return new 
CentralRepoIngestModule((IngestSettings) settings); } /* * Earlier versions of the modules had no ingest job settings. Create a * module with the default settings. */ if (settings instanceof NoIngestModuleIngestJobSettings) { - return new CentralRepoFileIngestModule((IngestSettings) getDefaultIngestJobSettings()); + return new CentralRepoIngestModule((IngestSettings) getDefaultIngestJobSettings()); } throw new IllegalArgumentException("Expected settings argument to be an instance of IngestSettings"); } From 7ad21efaa66100f208e2990dc0adf174dce344cc Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Fri, 19 Nov 2021 13:47:57 -0500 Subject: [PATCH 052/142] 7895 CR data artifact ingest module --- .../ingestmodule/CentralRepoIngestModule.java | 120 +++++++++--------- 1 file changed, 60 insertions(+), 60 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java index fec61caa42..1d30b2f9bf 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java @@ -58,6 +58,66 @@ final class CentralRepoIngestModule implements FileIngestModule { private CentralRepository centralRepo; private CorrelationAttributeInstance.Type filesType; + @Override + public ProcessResult process(AbstractFile abstractFile) { + if (!flagNotableItems && !saveCorrAttrInstances) { + return ProcessResult.OK; + } + + if (!filesType.isEnabled()) { + return ProcessResult.OK; + } + + if (abstractFile.getKnown() == TskData.FileKnown.KNOWN) { + return ProcessResult.OK; + } + + if (!CorrelationAttributeUtil.isSupportedAbstractFileType(abstractFile)) { + return ProcessResult.OK; + } + + /* + * The correlation attribute value for a file is its MD5 hash. 
This + * module cannot do anything with a file if the hash calculation has not + * been done, but the decision has been made to not do a hash + * calculation here if the file hashing and lookup module is not in this + * pipeline ahead of this module (affirmed per BC, 11/8/21). + */ + String md5 = abstractFile.getMd5Hash(); + if ((md5 == null) || (HashUtility.isNoDataMd5(md5))) { + return ProcessResult.OK; + } + + if (flagNotableItems) { + try { + TimingMetric timingMetric = HealthMonitor.getTimingMetric("Central Repository: Notable artifact query"); + Set otherCases = new HashSet<>(); + otherCases.addAll(centralRepo.getListCasesHavingArtifactInstancesKnownBad(filesType, md5)); + HealthMonitor.submitTimingMetric(timingMetric); + if (!otherCases.isEmpty()) { + makePrevNotableAnalysisResult(abstractFile, otherCases, filesType, md5, context.getDataSource().getId(), context.getJobId()); + } + } catch (CentralRepoException ex) { + logger.log(Level.SEVERE, "Error searching database for artifact.", ex); // NON-NLS + } catch (CorrelationAttributeNormalizationException ex) { + logger.log(Level.INFO, "Error searching database for artifact.", ex); // NON-NLS + } + } + + if (saveCorrAttrInstances) { + List corrAttrs = CorrelationAttributeUtil.makeCorrAttrsToSave(abstractFile); + for (CorrelationAttributeInstance corrAttr : corrAttrs) { + try { + centralRepo.addAttributeInstanceBulk(corrAttr); + } catch (CentralRepoException ex) { + logger.log(Level.SEVERE, "Error adding artifact to bulk artifacts.", ex); // NON-NLS + } + } + } + + return ProcessResult.OK; + } + /** * Constructs a file ingest module that adds correlation attributes for * files to the central repository, and makes previously notable analysis @@ -128,66 +188,6 @@ final class CentralRepoIngestModule implements FileIngestModule { } } - @Override - public ProcessResult process(AbstractFile abstractFile) { - if (!flagNotableItems && !saveCorrAttrInstances) { - return ProcessResult.OK; - } - - if 
(!filesType.isEnabled()) { - return ProcessResult.OK; - } - - if (abstractFile.getKnown() == TskData.FileKnown.KNOWN) { - return ProcessResult.OK; - } - - if (!CorrelationAttributeUtil.isSupportedAbstractFileType(abstractFile)) { - return ProcessResult.OK; - } - - /* - * The correlation attribute value for a file is its MD5 hash. This - * module cannot do anything with a file if the hash calculation has not - * been done, but the decision has been made to not do a hash - * calculation here if the file hashing and lookup module is not in this - * pipeline ahead of this module (affirmed per BC, 11/8/21). - */ - String md5 = abstractFile.getMd5Hash(); - if ((md5 == null) || (HashUtility.isNoDataMd5(md5))) { - return ProcessResult.OK; - } - - if (flagNotableItems) { - try { - TimingMetric timingMetric = HealthMonitor.getTimingMetric("Central Repository: Notable artifact query"); - Set otherCases = new HashSet<>(); - otherCases.addAll(centralRepo.getListCasesHavingArtifactInstancesKnownBad(filesType, md5)); - HealthMonitor.submitTimingMetric(timingMetric); - if (!otherCases.isEmpty()) { - makePrevNotableAnalysisResult(abstractFile, otherCases, filesType, md5, context.getDataSource().getId(), context.getJobId()); - } - } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error searching database for artifact.", ex); // NON-NLS - } catch (CorrelationAttributeNormalizationException ex) { - logger.log(Level.INFO, "Error searching database for artifact.", ex); // NON-NLS - } - } - - if (saveCorrAttrInstances) { - List corrAttrs = CorrelationAttributeUtil.makeCorrAttrsToSave(abstractFile); - for (CorrelationAttributeInstance corrAttr : corrAttrs) { - try { - centralRepo.addAttributeInstanceBulk(corrAttr); - } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, "Error adding artifact to bulk artifacts.", ex); // NON-NLS - } - } - } - - return ProcessResult.OK; - } - @Override public void shutDown() { if (refCounter.decrementAndGet(context.getJobId()) == 0) 
{ From 680a13a44dfc0149d46e2e31f1e6f295dc0cf2b3 Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Fri, 19 Nov 2021 14:22:21 -0500 Subject: [PATCH 053/142] Update AbstractDAO.java --- .../autopsy/mainui/datamodel/AbstractDAO.java | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java index c12733d9c0..d140dabec3 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java @@ -34,11 +34,19 @@ abstract class AbstractDAO { abstract void clearCaches(); /** - * Handles an autopsy event (i.e. ingest, case, etc.). + * Handles an autopsy event (i.e. ingest, case, etc.). This method is responsible + * for clearing internal caches that are effected by the event and returning + * one or more DAOEvents that should be broadcasted to the views. * - * @param evt The autopsy events. + * This method is responsible for minimizing the number of DAOEvents that + * are returned. For example, if there are 100 Autopsy events for the + * same type of data artifact in the same data source, then only a single + * DataArtifact event needs to be returned. * - * @return The list of dao events emitted due to this autopsy event. + * @param evt The Autopsy events that recently came in from Ingest/Case. + * + * @return The list of DAOEvents that should be broadcasted to the views or + * an empty list if the Autopsy events are irrelevant to this DAO. 
*/ abstract List handleAutopsyEvent(Collection evt); From 14c58c2a57abffa2973b877125e60673ec8783b0 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Fri, 19 Nov 2021 14:28:54 -0500 Subject: [PATCH 054/142] 7895 CR data artifact ingest module --- .../autopsy/actions/Bundle.properties-MERGED | 7 +++++ .../casemodule/Bundle.properties-MERGED | 27 +++++++++++++------ .../interestingitems/Bundle.properties-MERGED | 1 + .../netbeans/core/startup/Bundle.properties | 2 +- .../core/windows/view/ui/Bundle.properties | 2 +- 5 files changed, 29 insertions(+), 10 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/actions/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/actions/Bundle.properties-MERGED index b20993c416..5c9a0ea3ac 100755 --- a/Core/src/org/sleuthkit/autopsy/actions/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/actions/Bundle.properties-MERGED @@ -6,14 +6,19 @@ AddBlackboardArtifactTagAction.unableToTag.msg=Unable to tag {0}. AddContentTagAction.cannotApplyTagErr=Cannot Apply Tag AddContentTagAction.pluralTagFile=Add File Tags AddContentTagAction.singularTagFile=Add File Tag +# {0} - fileName +# {1} - tagName AddContentTagAction.tagExists={0} has been tagged as {1}. Cannot reapply the same tag. AddContentTagAction.taggingErr=Tagging Error +# {0} - fileName AddContentTagAction.unableToTag.msg=Unable to tag {0}, not a regular file. +# {0} - fileName AddContentTagAction.unableToTag.msg2=Unable to tag {0}. CTL_DumpThreadAction=Thread Dump CTL_ShowIngestProgressSnapshotAction=Ingest Status Details DeleteBlackboardArtifactTagAction.deleteTag=Remove Selected Tag(s) DeleteBlackboardArtifactTagAction.tagDelErr=Tag Deletion Error +# {0} - tagName DeleteBlackboardArtifactTagAction.unableToDelTag.msg=Unable to delete tag {0}. 
DeleteContentTagAction.deleteTag=Remove Selected Tag(s) DeleteContentTagAction.tagDelErr=Tag Deletion Error @@ -79,6 +84,8 @@ CTL_OpenOutputFolder=Open Case Folder OpenOutputFolder.error1=Case Folder Not Found: {0} OpenOutputFolder.noCaseOpen=No open case, therefore no current case folder available. OpenOutputFolder.CouldNotOpenOutputFolder=Could not open case folder +# {0} - old tag name +# {1} - artifactID ReplaceBlackboardArtifactTagAction.replaceTag.alert=Unable to replace tag {0} for artifact {1}. # {0} - old tag name # {1} - content obj id diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/casemodule/Bundle.properties-MERGED index 9a43ffe229..528d3a5088 100755 --- a/Core/src/org/sleuthkit/autopsy/casemodule/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/casemodule/Bundle.properties-MERGED @@ -247,10 +247,15 @@ AddImageWizardIngestConfigPanel.dsProcDone.errs.text=*Errors encountered in addi AddImageWizardIngestConfigVisual.getName.text=Configure Ingest AddImageWizardIterator.stepXofN=Step {0} of {1} AddLocalFilesTask.localFileAdd.progress.text=Adding: {0}/{1} -Case.getCurCase.exception.noneOpen=Cannot get the current case; there is no case open! +Case.getCurCase.exception.noneOpen=Cannot get the current case; there is no case open\! Case.open.msgDlg.updated.msg=Updated case database schema.\nA backup copy of the database with the following path has been made:\n {0} Case.open.msgDlg.updated.title=Case Database Schema Update -Case.checkImgExist.confDlg.doesntExist.msg=One of the images associated with \nthis case are missing. Would you like to search for them now?\nPreviously, the image was located at:\n{0}\nPlease note that you will still be able to browse directories and generate reports\nif you choose No, but you will not be able to view file content or run the ingest process. +Case.checkImgExist.confDlg.doesntExist.msg=One of the images associated with \n\ +this case are missing. 
Would you like to search for them now?\n\ +Previously, the image was located at:\n\ +{0}\n\ +Please note that you will still be able to browse directories and generate reports\n\ +if you choose No, but you will not be able to view file content or run the ingest process. Case.checkImgExist.confDlg.doesntExist.title=Missing Image Case.addImg.exception.msg=Error adding image to the case Case.updateCaseName.exception.msg=Error while trying to update the case name. @@ -269,9 +274,12 @@ Case.GetCaseTypeGivenPath.Failure=Unable to get case type Case.metaDataFileCorrupt.exception.msg=The case metadata file (.aut) is corrupted. Case.deleteReports.deleteFromDiskException.log.msg=Unable to delete the report from the disk. Case.deleteReports.deleteFromDiskException.msg=Unable to delete the report {0} from the disk.\nYou may manually delete it from {1} -CaseDeleteAction.closeConfMsg.text=Are you sure want to close and delete this case? \nCase Name: {0}\nCase Directory: {1} +CaseDeleteAction.closeConfMsg.text=Are you sure want to close and delete this case? \n\ + Case Name: {0}\n\ + Case Directory: {1} CaseDeleteAction.closeConfMsg.title=Warning: Closing the Current Case -CaseDeleteAction.msgDlg.fileInUse.msg=The delete action cannot be fully completed because the folder or file in it is open by another program.\n\nClose the folder and file and try again or you can delete the case manually. +CaseDeleteAction.msgDlg.fileInUse.msg=The delete action cannot be fully completed because the folder or file in it is open by another program.\n\n\ +Close the folder and file and try again or you can delete the case manually. CaseDeleteAction.msgDlg.fileInUse.title=Error: Folder In Use CaseDeleteAction.msgDlg.caseDelete.msg=Case {0} has been deleted. CaseOpenAction.autFilter.title={0} Case File ( {1}) @@ -303,7 +311,8 @@ NewCaseWizardAction.databaseProblem1.text=Cannot open database. 
Cancelling case NewCaseWizardAction.databaseProblem2.text=Error NewCaseWizardPanel1.validate.errMsg.invalidSymbols=The Case Name cannot contain any of the following symbols: \\ / : * ? " < > | NewCaseWizardPanel1.validate.errMsg.dirExists=Case directory ''{0}'' already exists. -NewCaseWizardPanel1.validate.confMsg.createDir.msg=The base directory "{0}" does not exist. \n\nDo you want to create that directory? +NewCaseWizardPanel1.validate.confMsg.createDir.msg=The base directory "{0}" does not exist. \n\n\ + Do you want to create that directory? NewCaseWizardPanel1.validate.confMsg.createDir.title=Create directory NewCaseWizardPanel1.validate.errMsg.cantCreateParDir.msg=Error: Could not create case parent directory {0} NewCaseWizardPanel1.validate.errMsg.prevCreateBaseDir.msg=Prevented from creating base directory {0} @@ -332,6 +341,7 @@ OptionalCasePropertiesPanel.lbPointOfContactPhoneLabel.text=Phone: OptionalCasePropertiesPanel.orgainizationPanel.border.title=Organization RecentCases.exception.caseIdxOutOfRange.msg=Recent case index {0} is out of range. RecentCases.getName.text=Clear Recent Cases +# {0} - case name RecentItems.openRecentCase.msgDlg.text=Case {0} no longer exists. 
SelectDataSourceProcessorPanel.name.text=Select Data Source Type StartupWindow.title.text=Welcome @@ -344,6 +354,7 @@ StartupWindowProvider.openCase.noFile=Unable to open previously open case becaus UnpackagePortableCaseDialog.title.text=Unpackage Portable Case UnpackagePortableCaseDialog.UnpackagePortableCaseDialog.extensions=Portable case package (.zip, .zip.001) UnpackagePortableCaseDialog.validatePaths.badExtension=File extension must be .zip or .zip.001 +# {0} - case folder UnpackagePortableCaseDialog.validatePaths.caseFolderExists=Folder {0} already exists UnpackagePortableCaseDialog.validatePaths.caseIsNotFile=Selected path is not a file UnpackagePortableCaseDialog.validatePaths.caseNotFound=File does not exist @@ -358,8 +369,8 @@ UnpackageWorker.doInBackground.previouslySeenCase=Case has been previously opene UpdateRecentCases.menuItem.clearRecentCases.text=Clear Recent Cases UpdateRecentCases.menuItem.empty=-Empty- AddImageWizardIngestConfigPanel.CANCEL_BUTTON.text=Cancel -NewCaseVisualPanel1.CaseFolderOnCDriveError.text=Warning: Path to multi-user case folder is on "C:" drive -NewCaseVisualPanel1.CaseFolderOnInternalDriveWindowsError.text=Warning: Path to case folder is on "C:" drive. Case folder is created on the target system +NewCaseVisualPanel1.CaseFolderOnCDriveError.text=Warning: Path to multi-user case folder is on \"C:\" drive +NewCaseVisualPanel1.CaseFolderOnInternalDriveWindowsError.text=Warning: Path to case folder is on \"C:\" drive. Case folder is created on the target system NewCaseVisualPanel1.CaseFolderOnInternalDriveLinuxError.text=Warning: Path to case folder is on the target system. Create case folder in mounted drive. 
NewCaseVisualPanel1.uncPath.error=Error: UNC paths are not allowed for Single-User cases CollaborationMonitor.addingDataSourceStatus.msg={0} adding data source @@ -367,7 +378,7 @@ CollaborationMonitor.analyzingDataSourceStatus.msg={0} analyzing {1} MissingImageDialog.lbWarning.text= MissingImageDialog.lbWarning.toolTipText= NewCaseVisualPanel1.caseParentDirWarningLabel.text= -NewCaseVisualPanel1.multiUserCaseRadioButton.text=Multi-User\t\t +NewCaseVisualPanel1.multiUserCaseRadioButton.text=Multi-User NewCaseVisualPanel1.singleUserCaseRadioButton.text=Single-User NewCaseVisualPanel1.caseTypeLabel.text=Case Type: SingleUserCaseConverter.BadDatabaseFileName=Database file does not exist! diff --git a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/Bundle.properties-MERGED index cccbcc1b57..06699b446a 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/Bundle.properties-MERGED @@ -2,6 +2,7 @@ FilesIdentifierIngestJobSettingsPanel.getError=Error getting interesting files s FilesIdentifierIngestJobSettingsPanel.updateError=Error updating interesting files sets settings file. FilesIdentifierIngestModule.getFilesError=Error getting interesting files sets from file. FilesIdentifierIngestModule.indexError.message=Failed to index interesting file hit artifact for keyword search. 
+# {0} - daysIncluded FilesSet.rule.dateRule.toString=(modified within {0} day(s)) FilesSetDefsPanel.bytes=Bytes FilesSetDefsPanel.cancelImportMsg=Cancel import diff --git a/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties b/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties index 083683cb3a..cd253dc3cb 100644 --- a/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties +++ b/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties @@ -1,5 +1,5 @@ #Updated by build script -#Thu, 04 Nov 2021 16:51:33 -0400 +#Thu, 30 Sep 2021 19:36:31 -0400 LBL_splash_window_title=Starting Autopsy SPLASH_HEIGHT=314 SPLASH_WIDTH=538 diff --git a/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties b/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties index f098a02b32..2d02262803 100644 --- a/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties +++ b/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties @@ -1,4 +1,4 @@ #Updated by build script -#Thu, 04 Nov 2021 16:51:33 -0400 +#Thu, 30 Sep 2021 19:36:31 -0400 CTL_MainWindow_Title=Autopsy 4.19.2 CTL_MainWindow_Title_No_Project=Autopsy 4.19.2 From de90fdf4cdf0384f247e52efae521038916c5751 Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Fri, 19 Nov 2021 14:32:35 -0500 Subject: [PATCH 055/142] Update AnalysisResultDAO.java --- .../mainui/datamodel/AnalysisResultDAO.java | 30 +++++++++++-------- 1 file changed, 17 insertions(+), 13 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java index db38727282..d5b61f3165 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java @@ -762,22 
+762,24 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } /** - * Generate DAO events from digest of autopsy events. + * Generate DAO events from digest of Autopsy events. * - * @param analysisResultMap A mapping of analysis result type ids to data - * sources where artifacts were created. - * @param setMap A mapping of (artifact type id, set name) to - * data sources where artifacts were created. + * @param analysisResultMap Contains the analysis results that do not use + * a set name. A mapping of analysis result type ids to data + * sources where the results were created. + * @param resultsWithSetMap Contains the anlaysis results that do use a set + * name. A mapping of (analysis result type id, set name) to + * data sources where results were created. * * @return The list of dao events. */ - private List getDAOEvents(Map> analysisResultMap, Map, Set> setMap) { + private List getDAOEvents(Map> analysisResultMap, Map, Set> resultsWithSetMap) { // invalidate cache entries that are affected by events // GVDTODO handle concurrency issues that may arise Stream analysisResultEvts = analysisResultMap.entrySet().stream() .flatMap(entry -> entry.getValue().stream().map(dsId -> new AnalysisResultEvent(entry.getKey(), dsId))); - Stream analysisResultSetEvts = setMap.entrySet().stream() + Stream analysisResultSetEvts = resultsWithSetMap.entrySet().stream() .flatMap(entry -> entry.getValue().stream().map(dsId -> new AnalysisResultSetEvent(entry.getKey().getRight(), entry.getKey().getLeft(), dsId))); // GVDTODO handle keyword hits @@ -789,12 +791,14 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { /** * Clears cache entries given the provided digests of autopsy events. * - * @param analysisResultMap A mapping of analysis result type ids to data - * sources where artifacts were created. - * @param setMap A mapping of (artifact type id, set name) to - * data sources where artifacts were created. 
+ * @param analysisResultMap Contains the analysis results that do not use + * a set name. A mapping of analysis result type ids to data + * sources where the results were created. + * @param resultsWithSetMap Contains the anlaysis results that do use a set + * name. A mapping of (analysis result type id, set name) to + * data sources where results were created. */ - private void clearRelevantCacheEntries(Map> analysisResultMap, Map, Set> setMap) { + private void clearRelevantCacheEntries(Map> analysisResultMap, Map, Set> resultsWithSetMap) { ConcurrentMap, AnalysisResultTableSearchResultsDTO> arConcurrentMap = this.analysisResultCache.asMap(); arConcurrentMap.forEach((k, v) -> { BlackboardArtifactSearchParam searchParam = k.getParamData(); @@ -807,7 +811,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { ConcurrentMap, AnalysisResultTableSearchResultsDTO> setConcurrentMap = this.setHitCache.asMap(); setConcurrentMap.forEach((k, v) -> { AnalysisResultSetSearchParam searchParam = k.getParamData(); - Set dsIds = setMap.get(Pair.of(searchParam.getArtifactType().getTypeID(), searchParam.getSetName())); + Set dsIds = resultsWithSetMap.get(Pair.of(searchParam.getArtifactType().getTypeID(), searchParam.getSetName())); if (dsIds != null && (searchParam.getDataSourceId() == null || dsIds.contains(searchParam.getDataSourceId()))) { arConcurrentMap.remove(k); } From 3cd559bb48f5852730d0c3cf12122a2050de5359 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Fri, 19 Nov 2021 14:47:27 -0500 Subject: [PATCH 056/142] 7895 CR data artifact ingest module --- .../contentviewers/Bundle.properties-MERGED | 1337 +++++++++-------- .../autopsy/core/Bundle.properties-MERGED | 8 +- .../corecomponents/Bundle.properties-MERGED | 6 +- .../coreutils/Bundle.properties-MERGED | 4 +- .../datamodel/Bundle.properties-MERGED | 8 +- .../filesearch/Bundle.properties-MERGED | 4 +- .../autopsy/ingest/Bundle.properties-MERGED | 2 +- .../Bundle.properties-MERGED | 7 +- 
.../fileextmismatch/Bundle.properties-MERGED | 18 +- .../hashdatabase/Bundle.properties-MERGED | 10 +- .../interestingitems/Bundle.properties-MERGED | 4 +- .../photoreccarver/Bundle.properties-MERGED | 2 +- .../modules/html/Bundle.properties-MERGED | 6 +- .../configuration/Bundle.properties-MERGED | 4 - 14 files changed, 748 insertions(+), 672 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/contentviewers/Bundle.properties-MERGED index 925e77c27c..be12157474 100644 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/Bundle.properties-MERGED @@ -180,776 +180,837 @@ TranslatablePanel.comboBoxOption.translatedText=Translated Text # {0} - exception message TranslatablePanel.onSetContentError.text=There was an error displaying the text: {0} ## Window toolbar Title -viewer.window.title.default=ICEpdf Viewer -viewer.window.title.open.default=ICEpdf Viewer - [{0}] +viewer.window.title.default = ICEpdf Viewer +viewer.window.title.open.default = ICEpdf Viewer - [{0}] #status bar -viewer.statusbar.currentPage=Page {0} / {1} +viewer.statusbar.currentPage = Page {0} / {1} -viewer.common.number.one=1 -viewer.common.number.two=2 -viewer.common.number.three=3 -viewer.common.number.four=4 -viewer.common.number.five=5 -viewer.common.number.six=6 -viewer.common.number.seven=7 -viewer.common.number.eight=8 -viewer.common.number.nine=9 -viewer.common.number.ten=10 -viewer.common.number.eleven=11 -viewer.common.number.twelve=12 -viewer.common.number.thirteen=13 -viewer.common.number.fourteen=14 -viewer.common.number.fifteen=15 -viewer.common.number.sixteen=16 -viewer.common.number.seventeen=17 -viewer.common.number.eighteen=18 -viewer.common.number.nineteen=19 -viewer.common.number.twenty=20 -viewer.common.number.twentyOne=21 -viewer.common.number.twentyTwo=22 -viewer.common.number.twentyThree=23 
-viewer.common.number.twentyFour=24 -viewer.common.number.twentyFive=25 -viewer.common.number.twentySix=26 -viewer.common.number.twentySeven=27 -viewer.common.number.thirtySix=36 -viewer.common.number.fortyEight=48 +viewer.common.number.one = 1 +viewer.common.number.two = 2 +viewer.common.number.three = 3 +viewer.common.number.four = 4 +viewer.common.number.five = 5 +viewer.common.number.six = 6 +viewer.common.number.seven = 7 +viewer.common.number.eight = 8 +viewer.common.number.nine = 9 +viewer.common.number.ten = 10 +viewer.common.number.eleven = 11 +viewer.common.number.twelve = 12 +viewer.common.number.thirteen = 13 +viewer.common.number.fourteen = 14 +viewer.common.number.fifteen = 15 +viewer.common.number.sixteen = 16 +viewer.common.number.seventeen = 17 +viewer.common.number.eighteen = 18 +viewer.common.number.nineteen = 19 +viewer.common.number.twenty = 20 +viewer.common.number.twentyOne = 21 +viewer.common.number.twentyTwo = 22 +viewer.common.number.twentyThree = 23 +viewer.common.number.twentyFour = 24 +viewer.common.number.twentyFive = 25 +viewer.common.number.twentySix = 26 +viewer.common.number.twentySeven = 27 +viewer.common.number.thirtySix = 36 +viewer.common.number.fortyEight = 48 ## Top Page Control Toolbar -viewer.toolbar.hideToolBar.label=Hide Toolbar -viewer.toolbar.showToolBar.label=Show Toolbar -viewer.toolbar.showUtilityPane.label=Show Utility Pane -viewer.toolbar.hideUtilityPane.label=Hide Utility Pane -viewer.toolbar.open.label= -viewer.toolbar.open.tooltip=Open Document -viewer.toolbar.saveAs.label=Save As -viewer.toolbar.saveAs.tooltip=Save As... 
-viewer.toolbar.print.label=Print -viewer.toolbar.print.tooltip=Print Document -viewer.toolbar.search.label=Search -viewer.toolbar.search.tooltip=Search Document -viewer.toolbar.utilityPane.label=Utility Pane -viewer.toolbar.utilityPane.tooltip=Show/Hide Utility Pane -viewer.toolbar.navigation.label= -viewer.toolbar.navigation.pages.tooltip=Number of Pages -viewer.toolbar.navigation.pages.firstPage.label= -viewer.toolbar.navigation.current.tooltip=Current Page Number -viewer.toolbar.navigation.current.firstPage.label= -viewer.toolbar.navigation.firstPage.label= -viewer.toolbar.navigation.firstPage.tooltip=First Page -viewer.toolbar.navigation.previousPage.label= -viewer.toolbar.navigation.previousPage.tooltip=Previous Page -viewer.toolbar.navigation.nextPage.label= -viewer.toolbar.navigation.nextPage.tooltip=Next Page -viewer.toolbar.navigation.lastPage.label= -viewer.toolbar.navigation.lastPage.tooltip=Last Page -viewer.toolbar.pageIndicator=of {0} -viewer.toolbar.zoom.label= -viewer.toolbar.zoom.tooltip=Zoom -viewer.toolbar.zoom.out.label= -viewer.toolbar.zoom.out.tooltip=Zoom Out -viewer.toolbar.zoom.in.label= -viewer.toolbar.zoom.in.tooltip=Zoom In -viewer.toolbar.pageFit.actualsize.label= -viewer.toolbar.pageFit.actualsize.tooltip=Actual Size -viewer.toolbar.pageFit.fitWindow.label= -viewer.toolbar.pageFit.fitWindow.tooltip=Fit in Window -viewer.toolbar.pageFit.fitWidth.label= -viewer.toolbar.pageFit.fitWidth.tooltip=Fit Width -viewer.toolbar.rotation.left.label= -viewer.toolbar.rotation.left.tooltip=Rotate Left -viewer.toolbar.rotation.right.label= -viewer.toolbar.rotation.right.tooltip=Rotate Right -viewer.toolbar.tool.pan.label= -viewer.toolbar.tool.pan.tooltip=Pan Tool -viewer.toolbar.tool.text.label= -viewer.toolbar.tool.text.tooltip=Text Select Tool -viewer.toolbar.tool.select.label= -viewer.toolbar.tool.select.tooltip=Select Tool -viewer.toolbar.tool.link.label= -viewer.toolbar.tool.link.tooltip=Link Annotation Tool 
-viewer.toolbar.tool.highlight.label=Highlight -viewer.toolbar.tool.highlight.tooltip=Highlight Annotation Tool -viewer.toolbar.tool.strikeOut.label=Strike Out -viewer.toolbar.tool.strikeOut.tooltip=Strike Out Annotation Tool -viewer.toolbar.tool.underline.label=Underline -viewer.toolbar.tool.underline.tooltip=Underline Annotation Tool -viewer.toolbar.tool.line.label=Line -viewer.toolbar.tool.line.tooltip=Line Annotation Tool -viewer.toolbar.tool.lineArrow.label=Line Arrow -viewer.toolbar.tool.lineArrow.tooltip=Line Arrow Annotation Tool -viewer.toolbar.tool.rectangle.label=Rectangle -viewer.toolbar.tool.rectangle.tooltip=Rectangle Annotation Tool -viewer.toolbar.tool.circle.label=Circle -viewer.toolbar.tool.circle.tooltip=Circle Annotation Tool -viewer.toolbar.tool.ink.label=Ink -viewer.toolbar.tool.ink.tooltip=Ink Annotation Tool -viewer.toolbar.tool.freeText.label=Free Text -viewer.toolbar.tool.freeText.tooltip=Free Text Annotation Tool -viewer.toolbar.tool.textAnno.label=Text Annotation -viewer.toolbar.tool.textAnno.tooltip=Text Annotation Tool -viewer.toolbar.tool.plolyLine.label=Poly Line -viewer.toolbar.tool.plolyLine.tooltip=Poly Line Annotation Tool -viewer.toolbar.tool.zoomIn.label= -viewer.toolbar.tool.zoomIn.tooltip=Zoom In Tool -viewer.toolbar.tool.zoomMarquis.label= -viewer.toolbar.tool.zoomMarquis.tooltip=Zoom Marquee Tool -viewer.toolbar.tool.zoomDynamic.label= -viewer.toolbar.tool.zoomDynamic.tooltip=Zoom Dynamic Tool -viewer.toolbar.tool.zoomOut.label= -viewer.toolbar.tool.zoomOut.tooltip=Zoom Out Tool -viewer.toolbar.pageFit.fontEngine.label= -viewer.toolbar.pageFit.fontEngine.tooltip=Enable/Disable Font Engine -viewer.toolbar.tool.forms.highlight.label=Highlight Forms -viewer.toolbar.tool.forms.highlight.tooltip=Show/Hide Form Highlighting +viewer.toolbar.hideToolBar.label = Hide Toolbar +viewer.toolbar.showToolBar.label = Show Toolbar +viewer.toolbar.showUtilityPane.label = Show Utility Pane +viewer.toolbar.hideUtilityPane.label = Hide Utility 
Pane +viewer.toolbar.open.label = +viewer.toolbar.open.tooltip = Open Document +viewer.toolbar.saveAs.label = Save As +viewer.toolbar.saveAs.tooltip = Save As... +viewer.toolbar.print.label = Print +viewer.toolbar.print.tooltip = Print Document +viewer.toolbar.search.label = Search +viewer.toolbar.search.tooltip = Search Document +viewer.toolbar.utilityPane.label = Utility Pane +viewer.toolbar.utilityPane.tooltip = Show/Hide Utility Pane +viewer.toolbar.navigation.label = +viewer.toolbar.navigation.pages.tooltip = Number of Pages +viewer.toolbar.navigation.pages.firstPage.label = +viewer.toolbar.navigation.current.tooltip = Current Page Number +viewer.toolbar.navigation.current.firstPage.label = +viewer.toolbar.navigation.firstPage.label = +viewer.toolbar.navigation.firstPage.tooltip = First Page +viewer.toolbar.navigation.previousPage.label = +viewer.toolbar.navigation.previousPage.tooltip = Previous Page +viewer.toolbar.navigation.nextPage.label = +viewer.toolbar.navigation.nextPage.tooltip = Next Page +viewer.toolbar.navigation.lastPage.label = +viewer.toolbar.navigation.lastPage.tooltip = Last Page +viewer.toolbar.pageIndicator = of {0} +viewer.toolbar.zoom.label = +viewer.toolbar.zoom.tooltip = Zoom +viewer.toolbar.zoom.out.label = +viewer.toolbar.zoom.out.tooltip = Zoom Out +viewer.toolbar.zoom.in.label = +viewer.toolbar.zoom.in.tooltip = Zoom In +viewer.toolbar.pageFit.actualsize.label = +viewer.toolbar.pageFit.actualsize.tooltip = Actual Size +viewer.toolbar.pageFit.fitWindow.label = +viewer.toolbar.pageFit.fitWindow.tooltip = Fit in Window +viewer.toolbar.pageFit.fitWidth.label = +viewer.toolbar.pageFit.fitWidth.tooltip = Fit Width +viewer.toolbar.rotation.left.label = +viewer.toolbar.rotation.left.tooltip = Rotate Left +viewer.toolbar.rotation.right.label = +viewer.toolbar.rotation.right.tooltip = Rotate Right +viewer.toolbar.tool.pan.label = +viewer.toolbar.tool.pan.tooltip = Pan Tool +viewer.toolbar.tool.text.label = +viewer.toolbar.tool.text.tooltip = 
Text Select Tool +viewer.toolbar.tool.select.label = +viewer.toolbar.tool.select.tooltip = Select Tool +viewer.toolbar.tool.link.label = +viewer.toolbar.tool.link.tooltip = Link Annotation Tool +viewer.toolbar.tool.highlight.label = Highlight +viewer.toolbar.tool.highlight.tooltip = Highlight Annotation Tool +viewer.toolbar.tool.strikeOut.label = Strike Out +viewer.toolbar.tool.strikeOut.tooltip = Strike Out Annotation Tool +viewer.toolbar.tool.underline.label = Underline +viewer.toolbar.tool.underline.tooltip = Underline Annotation Tool +viewer.toolbar.tool.line.label = Line +viewer.toolbar.tool.line.tooltip = Line Annotation Tool +viewer.toolbar.tool.lineArrow.label = Line Arrow +viewer.toolbar.tool.lineArrow.tooltip = Line Arrow Annotation Tool +viewer.toolbar.tool.rectangle.label = Rectangle +viewer.toolbar.tool.rectangle.tooltip = Rectangle Annotation Tool +viewer.toolbar.tool.circle.label = Circle +viewer.toolbar.tool.circle.tooltip = Circle Annotation Tool +viewer.toolbar.tool.ink.label = Ink +viewer.toolbar.tool.ink.tooltip = Ink Annotation Tool +viewer.toolbar.tool.freeText.label = Free Text +viewer.toolbar.tool.freeText.tooltip = Free Text Annotation Tool +viewer.toolbar.tool.textAnno.label = Text Annotation +viewer.toolbar.tool.textAnno.tooltip = Text Annotation Tool +viewer.toolbar.tool.plolyLine.label = Poly Line +viewer.toolbar.tool.plolyLine.tooltip = Poly Line Annotation Tool +viewer.toolbar.tool.zoomIn.label = +viewer.toolbar.tool.zoomIn.tooltip = Zoom In Tool +viewer.toolbar.tool.zoomMarquis.label = +viewer.toolbar.tool.zoomMarquis.tooltip = Zoom Marquee Tool +viewer.toolbar.tool.zoomDynamic.label = +viewer.toolbar.tool.zoomDynamic.tooltip = Zoom Dynamic Tool +viewer.toolbar.tool.zoomOut.label = +viewer.toolbar.tool.zoomOut.tooltip = Zoom Out Tool +viewer.toolbar.pageFit.fontEngine.label = +viewer.toolbar.pageFit.fontEngine.tooltip = Enable/Disable Font Engine +viewer.toolbar.tool.forms.highlight.label = Highlight Forms 
+viewer.toolbar.tool.forms.highlight.tooltip = Show/Hide Form Highlighting ## Bottom Page View Control Toolbar -viewer.toolbar.pageView.nonContinuous.singlePage.label= -viewer.toolbar.pageView.nonContinuous.singlePage.tooltip=Single Page View Non-Continuous -viewer.toolbar.pageView.nonContinuous.facingPage.label= -viewer.toolbar.pageView.nonContinuous.facingPage.tooltip=Facing Page View Non-Continuous -viewer.toolbar.pageView.continuous.singlePage.label= -viewer.toolbar.pageView.continuous.singlePage.tooltip=Single Page View Continuous -viewer.toolbar.pageView.continuous.facingPage.label= -viewer.toolbar.pageView.continuous.facingPage.tooltip=Facing Page View Continuous +viewer.toolbar.pageView.nonContinuous.singlePage.label = +viewer.toolbar.pageView.nonContinuous.singlePage.tooltip = Single Page View Non-Continuous +viewer.toolbar.pageView.nonContinuous.facingPage.label = +viewer.toolbar.pageView.nonContinuous.facingPage.tooltip = Facing Page View Non-Continuous +viewer.toolbar.pageView.continuous.singlePage.label = +viewer.toolbar.pageView.continuous.singlePage.tooltip = Single Page View Continuous +viewer.toolbar.pageView.continuous.facingPage.label = +viewer.toolbar.pageView.continuous.facingPage.tooltip = Facing Page View Continuous ## File Menu and submenu items -viewer.menu.file.label=File -viewer.menu.file.mnemonic=F -viewer.menu.open.label=Open -viewer.menu.open.file.label=File... -viewer.menu.open.URL.label=URL... -viewer.menu.close.label=Close -viewer.menu.saveAs.label=Save As... -viewer.menu.exportText.label=Export Text... -viewer.menu.exportSVG.label=Export SVG... -viewer.menu.documentPermission.label=Document Permissions... -viewer.menu.documentInformation.label=Document Information... -viewer.menu.documentFonts.label=Document Fonts... -viewer.menu.printSetup.label=Print Setup... -viewer.menu.print.label=Print... 
-viewer.menu.exit.label=Exit +viewer.menu.file.label = File +viewer.menu.file.mnemonic = F +viewer.menu.open.label = Open +viewer.menu.open.file.label = File... +viewer.menu.open.URL.label = URL... +viewer.menu.close.label = Close +viewer.menu.saveAs.label = Save As... +viewer.menu.exportText.label = Export Text... +viewer.menu.exportSVG.label = Export SVG... +viewer.menu.documentPermission.label = Document Permissions... +viewer.menu.documentInformation.label = Document Information... +viewer.menu.documentFonts.label = Document Fonts... +viewer.menu.printSetup.label = Print Setup... +viewer.menu.print.label = Print... +viewer.menu.exit.label = Exit ## View Menu and submenu items -viewer.menu.edit.label=Edit -viewer.menu.edit.mnemonic=E -viewer.menu.edit.undo.label=Undo -viewer.menu.edit.redo.label=Redo -viewer.menu.edit.copy.label=Copy -viewer.menu.edit.delete.label=Delete -viewer.menu.edit.selectAll.label=Select All -viewer.menu.edit.deselectAll.label=Deselect All +viewer.menu.edit.label = Edit +viewer.menu.edit.mnemonic = E +viewer.menu.edit.undo.label = Undo +viewer.menu.edit.redo.label = Redo +viewer.menu.edit.copy.label = Copy +viewer.menu.edit.delete.label = Delete +viewer.menu.edit.selectAll.label = Select All +viewer.menu.edit.deselectAll.label = Deselect All ## View Menu and submenu items -viewer.menu.view.label=View -viewer.menu.view.mnemonic=V -viewer.menu.view.actualSize.label=Actual Size -viewer.menu.view.fitInWindow.label=Fit in Window -viewer.menu.view.fitWidth.label=Fit Width -viewer.menu.view.zoomIn.label=Zoom In -viewer.menu.view.zoomOut.label=Zoom Out -viewer.menu.view.rotateLeft.label=Rotate Left -viewer.menu.view.rotateRight.label=Rotate Right -viewer.menu.view.hideToolBar.label=Hide Toolbar -viewer.menu.view.showToolBar.label=Show Toolbar -viewer.menu.view.showUtilityPane.label=Show Utility Pane -viewer.menu.view.hideUtilityPane.label=Hide Utility Pane +viewer.menu.view.label = View +viewer.menu.view.mnemonic = V 
+viewer.menu.view.actualSize.label = Actual Size +viewer.menu.view.fitInWindow.label = Fit in Window +viewer.menu.view.fitWidth.label = Fit Width +viewer.menu.view.zoomIn.label = Zoom In +viewer.menu.view.zoomOut.label = Zoom Out +viewer.menu.view.rotateLeft.label = Rotate Left +viewer.menu.view.rotateRight.label = Rotate Right +viewer.menu.view.hideToolBar.label = Hide Toolbar +viewer.menu.view.showToolBar.label = Show Toolbar +viewer.menu.view.showUtilityPane.label = Show Utility Pane +viewer.menu.view.hideUtilityPane.label = Hide Utility Pane ## Document Menu and submenu items -viewer.menu.document.label=Document -viewer.menu.document.mnemonic=D -viewer.menu.document.firstPage.label=First Page -viewer.menu.document.previousPage.label=Previous Page -viewer.menu.document.nextPage.label=Next Page -viewer.menu.document.lastPage.label=Last Page -viewer.menu.document.search.label=Search... -viewer.menu.document.gotToPage.label=Go To Page... +viewer.menu.document.label = Document +viewer.menu.document.mnemonic = D +viewer.menu.document.firstPage.label = First Page +viewer.menu.document.previousPage.label = Previous Page +viewer.menu.document.nextPage.label = Next Page +viewer.menu.document.lastPage.label = Last Page +viewer.menu.document.search.label = Search... +viewer.menu.document.gotToPage.label = Go To Page... 
## Window Menu and submenu items -viewer.menu.window.label=Window -viewer.menu.window.mnemonic=W -viewer.menu.window.minAll.label=Minimize All -viewer.menu.window.minAll.mnemonic=M -viewer.menu.window.frontAll.label=Bring All to Front -viewer.menu.window.frontAll.mnemonic=B -viewer.menu.window.1.label=1 -viewer.menu.window.1.mnemonic=1 -viewer.menu.window.2.label=2 -viewer.menu.window.2.mnemonic=2 -viewer.menu.window.3.label=3 -viewer.menu.window.3.mnemonic=3 -viewer.menu.window.4.label=4 -viewer.menu.window.4.mnemonic=4 -viewer.menu.window.5.label=5 -viewer.menu.window.5.mnemonic=5 -viewer.menu.window.6.label=6 -viewer.menu.window.6.mnemonic=6 -viewer.menu.window.7.label=7 -viewer.menu.window.7.mnemonic=7 -viewer.menu.window.8.label=8 -viewer.menu.window.8.mnemonic=8 -viewer.menu.window.9.label=9 -viewer.menu.window.9.mnemonic=9 +viewer.menu.window.label = Window +viewer.menu.window.mnemonic = W +viewer.menu.window.minAll.label = Minimize All +viewer.menu.window.minAll.mnemonic = M +viewer.menu.window.frontAll.label = Bring All to Front +viewer.menu.window.frontAll.mnemonic = B +viewer.menu.window.1.label = 1 +viewer.menu.window.1.mnemonic = 1 +viewer.menu.window.2.label = 2 +viewer.menu.window.2.mnemonic = 2 +viewer.menu.window.3.label = 3 +viewer.menu.window.3.mnemonic = 3 +viewer.menu.window.4.label = 4 +viewer.menu.window.4.mnemonic = 4 +viewer.menu.window.5.label = 5 +viewer.menu.window.5.mnemonic = 5 +viewer.menu.window.6.label = 6 +viewer.menu.window.6.mnemonic = 6 +viewer.menu.window.7.label = 7 +viewer.menu.window.7.mnemonic = 7 +viewer.menu.window.8.label = 8 +viewer.menu.window.8.mnemonic = 8 +viewer.menu.window.9.label = 9 +viewer.menu.window.9.mnemonic = 9 ## Add as many entries as you want, to viewer.menu.window.X.label and mnemonic ## where X is an incrementing integer. 
The mnemonic should be one unique ## character found within the label ## Help Menu and submenu items -viewer.menu.help.label=Help -viewer.menu.help.mnemonic=H -viewer.menu.help.about.label=About ICEpdf viewer... +viewer.menu.help.label = Help +viewer.menu.help.mnemonic = H +viewer.menu.help.about.label = About ICEpdf viewer... ## General error dialog -viewer.dialog.error.exception.title=ICEsoft ICEpdf - Exception -viewer.dialog.error.exception.msg=There was an error executing your command do to the following exception\n{0}. +viewer.dialog.error.exception.title = ICEsoft ICEpdf - Exception +viewer.dialog.error.exception.msg = \ + There was an error executing your command do to the following exception\n\ + {0}. ## Open File Dialog -viewer.dialog.openFile.title=Open File -viewer.dialog.openFile.error.title=ICEsoft ICEpdf - Open File Error -viewer.dialog.openFile.error.msg=ICEpdf could not open the specified file at {0}\nThe file may be corrupt or not a supported file type. +viewer.dialog.openFile.title = Open File +viewer.dialog.openFile.error.title = ICEsoft ICEpdf - Open File Error +viewer.dialog.openFile.error.msg = \ + ICEpdf could not open the specified file at {0}\n\ + The file may be corrupt or not a supported file type. -viewer.dialog.openDocument.pdfException.title=ICEsoft ICEpdf - PDF Exception -viewer.dialog.openDocument.pdfException.msg=ICEpdf could not open the specified file {0} \nThe file may be corrupt or not a supported file type. +viewer.dialog.openDocument.pdfException.title = ICEsoft ICEpdf - PDF Exception +viewer.dialog.openDocument.pdfException.msg = \ + ICEpdf could not open the specified file {0} \n\ + The file may be corrupt or not a supported file type. 
-viewer.dialog.openDocument.pdfSecurityException.title=ICEsoft ICEpdf - PDF Security Exception -viewer.dialog.openDocument.pdfSecurityException.msg=ICEpdf could not open the encrypted file at {0}\nThis may be the result of an invalid password or a missing JCE Security Provider.\n\nPlease refer to ICEpdf Developer's Guide for more information. +viewer.dialog.openDocument.pdfSecurityException.title = ICEsoft ICEpdf - PDF Security Exception +viewer.dialog.openDocument.pdfSecurityException.msg = \ + ICEpdf could not open the encrypted file at {0}\n\ + This may be the result of an invalid password or a missing JCE Security Provider.\n\n\ + Please refer to ICEpdf Developer's Guide for more information. -viewer.dialog.openDocument.exception.title=ICEsoft ICEpdf - Exception -viewer.dialog.openDocument.exception.msg=ICEpdf could not open the specified file at {0}\nThe file may be corrupt or not a supported file type. +viewer.dialog.openDocument.exception.title = ICEsoft ICEpdf - Exception +viewer.dialog.openDocument.exception.msg = \ + ICEpdf could not open the specified file at {0}\n\ + The file may be corrupt or not a supported file type. -viewer.dialog.openURL.exception.title=ICEsoft ICEpdf - URL Exception -viewer.dialog.openURL.exception.msg=ICEpdf could not open the specified file. {0} \nat URL: {1} -viewer.dialog.openURL.downloading.msg=Downloading {0} +viewer.dialog.openURL.exception.title = ICEsoft ICEpdf - URL Exception +viewer.dialog.openURL.exception.msg = \ + ICEpdf could not open the specified file. {0} \n\ + at URL: {1} +viewer.dialog.openURL.downloading.msg = Downloading {0} ## General error dialog -viewer.dialog.information.copyAll.title=ICEsoft ICEpdf - Information -viewer.dialog.information.copyAll.msg=The document has more than {0} pages, please use\n"Export text..." to extract document text. 
+viewer.dialog.information.copyAll.title = ICEsoft ICEpdf - Information +viewer.dialog.information.copyAll.msg = \ + The document has more than {0} pages, please use\n\ + "Export text..." to extract document text. ## Open URL Dialog -viewer.dialog.security.title=Document Security -viewer.dialog.security.msg=This PDF is protected -viewer.dialog.security.password.label=Password: -viewer.dialog.security.okButton.label=Ok -viewer.dialog.security.okButton.mnemonic=O -viewer.dialog.security.cancelButton.label=Cancel -viewer.dialog.security.cancelButton.mnemonic=C +viewer.dialog.security.title = Document Security +viewer.dialog.security.msg = This PDF is protected +viewer.dialog.security.password.label = Password: +viewer.dialog.security.okButton.label = Ok +viewer.dialog.security.okButton.mnemonic = O +viewer.dialog.security.cancelButton.label = Cancel +viewer.dialog.security.cancelButton.mnemonic = C ## Open URL Dialog -viewer.dialog.openURL.title=Open URL +viewer.dialog.openURL.title = Open URL ### Save a Copy Dialog -viewer.dialog.saveAs.title=Save As -viewer.dialog.saveAs.extensionError.title=ICEsoft ICEpdf - Save Error -viewer.dialog.saveAs.extensionError.msg=ICEpdf could not save to {0} because it is not a supported file type. -viewer.dialog.saveAs.noExtensionError.title=ICEsoft ICEpdf - Save Error -viewer.dialog.saveAs.noExtensionError.msg=Please specify a file extension. -viewer.dialog.saveAs.noneUniqueName.title=ICEsoft ICEpdf - Save Error -viewer.dialog.saveAs.noneUniqueName.msg=The file named {0} already exists. Please specify a unique name. -viewer.dialog.saveAs.noPermission.title=ICEpdf Viewer RI - Save Error -viewer.dialog.saveAs.noPermission.msg=You do not have permission or the credentials to save this document. -viewer.dialog.saveAs.noUpdates.title=ICEpdf Viewer RI -viewer.dialog.saveAs.noUpdates.msg=Document changes will not be saved, please upgrade to ICEpdf PRO. 
-viewer.dialog.saveOnClose.noUpdates.title=ICEpdf Viewer RI -viewer.dialog.saveOnClose.noUpdates.msg=Do you want to save changes to {0}? +viewer.dialog.saveAs.title = Save As +viewer.dialog.saveAs.extensionError.title = ICEsoft ICEpdf - Save Error +viewer.dialog.saveAs.extensionError.msg = \ + ICEpdf could not save to {0} because it is not a supported file type. +viewer.dialog.saveAs.noExtensionError.title = ICEsoft ICEpdf - Save Error +viewer.dialog.saveAs.noExtensionError.msg = Please specify a file extension. +viewer.dialog.saveAs.noneUniqueName.title = ICEsoft ICEpdf - Save Error +viewer.dialog.saveAs.noneUniqueName.msg = \ + The file named {0} already exists. Please specify a unique name. +viewer.dialog.saveAs.noPermission.title = ICEpdf Viewer RI - Save Error +viewer.dialog.saveAs.noPermission.msg = You do not have permission or the credentials to save this document. +viewer.dialog.saveAs.noUpdates.title = ICEpdf Viewer RI +viewer.dialog.saveAs.noUpdates.msg = Document changes will not be saved, please upgrade to ICEpdf PRO. +viewer.dialog.saveOnClose.noUpdates.title = ICEpdf Viewer RI +viewer.dialog.saveOnClose.noUpdates.msg = Do you want to save changes to {0}? ## Export Text Dialog -viewer.dialog.exportText.title=Export Document Text -viewer.dialog.exportText.progress.msg=Extracting PDF Text -viewer.dialog.exportText.noExtensionError.title=ICEsoft ICEpdf - Save Error -viewer.dialog.exportText.noExtensionError.msg=Please specify a file extension. +viewer.dialog.exportText.title = Export Document Text +viewer.dialog.exportText.progress.msg = Extracting PDF Text +viewer.dialog.exportText.noExtensionError.title = ICEsoft ICEpdf - Save Error +viewer.dialog.exportText.noExtensionError.msg = Please specify a file extension. # Text extraction output file -viewer.exportText.fileStamp.msg=ICEsoft ICEpdf Viewer, (c) ICEsoft Technologies, Inc. -viewer.exportText.pageStamp.msg= +viewer.exportText.fileStamp.msg = ICEsoft ICEpdf Viewer, (c) ICEsoft Technologies, Inc. 
+viewer.exportText.pageStamp.msg = # Completed x out of y page(s). -viewer.exportText.fileStamp.progress.msg=Completed {0} out of {1}. -viewer.exportText.fileStamp.progress.oneFile.msg={2} page -viewer.exportText.fileStamp.progress.moreFile.msg={2} pages +viewer.exportText.fileStamp.progress.msg = \ + Completed {0} out of {1}. +viewer.exportText.fileStamp.progress.oneFile.msg = {2} page +viewer.exportText.fileStamp.progress.moreFile.msg = {2} pages ## Export SVG Dialog -viewer.dialog.exportSVG.title=Export to SVG -viewer.dialog.exportSVG.status.exporting.msg=Exporting page {0} to SVG file {1} ... -viewer.dialog.exportSVG.status.error.msg=Problem exporting page {0} to SVG file {1} : {2} -viewer.dialog.exportSVG.status.finished.msg=Finished exporting page {0} to SVG file {1} -viewer.dialog.exportSVG.noExtensionError.title=ICEsoft ICEpdf - SVG Error -viewer.dialog.exportSVG.noExtensionError.msg=Please specify a file extension. -viewer.dialog.exportSVG.exportError.title=ICEsoft ICEpdf - SVG Error -viewer.dialog.exportSVG.exportError.msg=ICEpdf could not export to {0} \nbecuase it is either not a supported file type or \nbecause the file has been corrupted. +viewer.dialog.exportSVG.title = Export to SVG +viewer.dialog.exportSVG.status.exporting.msg = Exporting page {0} to SVG file {1} ... +viewer.dialog.exportSVG.status.error.msg = \ + Problem exporting page {0} to SVG file {1} : {2} +viewer.dialog.exportSVG.status.finished.msg = \ + Finished exporting page {0} to SVG file {1} +viewer.dialog.exportSVG.noExtensionError.title = ICEsoft ICEpdf - SVG Error +viewer.dialog.exportSVG.noExtensionError.msg = Please specify a file extension. +viewer.dialog.exportSVG.exportError.title = ICEsoft ICEpdf - SVG Error +viewer.dialog.exportSVG.exportError.msg = \ + ICEpdf could not export to {0} \n\ + because it is either not a supported file type or \n\ + because the file has been corrupted.
# Printing Progress bar -viewer.dialog.printing.status.progress.msg=Page {0} of {1} -viewer.dialog.printing.status.start.msg=Spooling Page(s) to Printer +viewer.dialog.printing.status.progress.msg = Page {0} of {1} +viewer.dialog.printing.status.start.msg = Spooling Page(s) to Printer ## Document Permissions Dialog -viewer.dialog.documentPermissions.title=Document Permissions -viewer.dialog.documentPermissions.securityMethod.label=Security Method: -viewer.dialog.documentPermissions.userPassword.label=User Password: -viewer.dialog.documentPermissions.ownerPassword.label=Owner Password: -viewer.dialog.documentPermissions.printing.label=Printing: -viewer.dialog.documentPermissions.changing.label=Changing the Document: -viewer.dialog.documentPermissions.copyExtraction.label=Content Copying or Extraction: -viewer.dialog.documentPermissions.comments.label=Aurthoring Comments and Form Fields: -viewer.dialog.documentPermissions.formFillingIn.label=Form Field Fill-in or Signing: -viewer.dialog.documentPermissions.accessibility.label=Content Accessibility Enabled: -viewer.dialog.documentPermissions.assembly.label=Document Assembly: -viewer.dialog.documentPermissions.encryptionLevel.label=Encryption Level: -viewer.dialog.documentPermissions.securityLevel={0}-bit v{1} R {2} -viewer.dialog.documentPermissions.none=None -viewer.dialog.documentPermissions.no=No -viewer.dialog.documentPermissions.yes=Yes -viewer.dialog.documentPermissions.allowed=Allowed -viewer.dialog.documentPermissions.notAllowed=Not Allowed -viewer.dialog.documentPermissions.fullyAllowed=Fully Allowed -viewer.dialog.documentPermissions.standardSecurity=Adobe Acrobat Standard Security -viewer.dialog.documentPermissions.partial=Partial (Low Quality) +viewer.dialog.documentPermissions.title = Document Permissions +viewer.dialog.documentPermissions.securityMethod.label = Security Method: +viewer.dialog.documentPermissions.userPassword.label = User Password: +viewer.dialog.documentPermissions.ownerPassword.label = 
Owner Password: +viewer.dialog.documentPermissions.printing.label = Printing: +viewer.dialog.documentPermissions.changing.label = Changing the Document: +viewer.dialog.documentPermissions.copyExtraction.label = Content Copying or Extraction: +viewer.dialog.documentPermissions.comments.label = Authoring Comments and Form Fields: +viewer.dialog.documentPermissions.formFillingIn.label = Form Field Fill-in or Signing: +viewer.dialog.documentPermissions.accessibility.label = Content Accessibility Enabled: +viewer.dialog.documentPermissions.assembly.label = Document Assembly: +viewer.dialog.documentPermissions.encryptionLevel.label = Encryption Level: +viewer.dialog.documentPermissions.securityLevel = {0}-bit v{1} R {2} +viewer.dialog.documentPermissions.none = None +viewer.dialog.documentPermissions.no = No +viewer.dialog.documentPermissions.yes = Yes +viewer.dialog.documentPermissions.allowed = Allowed +viewer.dialog.documentPermissions.notAllowed = Not Allowed +viewer.dialog.documentPermissions.fullyAllowed = Fully Allowed +viewer.dialog.documentPermissions.standardSecurity = Adobe Acrobat Standard Security +viewer.dialog.documentPermissions.partial = Partial (Low Quality) ## Document Information Dialog -viewer.dialog.documentInformation.title=Document Information -viewer.dialog.documentInformation.title.label=Title: -viewer.dialog.documentInformation.subject.label=Subject: -viewer.dialog.documentInformation.author.label=Author: -viewer.dialog.documentInformation.keywords.label=Keywords: -viewer.dialog.documentInformation.creator.label=Creator: -viewer.dialog.documentInformation.producer.label=Producer: -viewer.dialog.documentInformation.created.label=Created: -viewer.dialog.documentInformation.modified.label=Modified: -viewer.dialog.documentInformation.notAvailable=Not Available +viewer.dialog.documentInformation.title = Document Information +viewer.dialog.documentInformation.title.label = Title: +viewer.dialog.documentInformation.subject.label = Subject:
+viewer.dialog.documentInformation.author.label = Author: +viewer.dialog.documentInformation.keywords.label = Keywords: +viewer.dialog.documentInformation.creator.label = Creator: +viewer.dialog.documentInformation.producer.label = Producer: +viewer.dialog.documentInformation.created.label = Created: +viewer.dialog.documentInformation.modified.label = Modified: +viewer.dialog.documentInformation.notAvailable = Not Available ## Go to Page Dialog -viewer.dialog.goToPage.title=Go to Page... -viewer.dialog.goToPage.description.label=Page Number +viewer.dialog.goToPage.title = Go to Page... +viewer.dialog.goToPage.description.label = Page Number ## About Dialog -viewer.dialog.about.title=About ICEpdf Viewer -viewer.dialog.about.pageNumber.label=\n\nCheck the ICEpdf web site for the latest news:\nhttp://www.icepdf.org/ \n\n +viewer.dialog.about.title = About ICEpdf Viewer +viewer.dialog.about.pageNumber.label = \n\ +\n\ +Check the ICEpdf web site for the latest news:\n\ +http://www.icepdf.org/ \n\n ## Font Properties Dialog -viewer.dialog.fonts.title=Document Font Properties -viewer.dialog.fonts.border.label=Fonts used by this document -viewer.dialog.fonts.info.type.label=Type: {0} -viewer.dialog.fonts.info.encoding.label=Encoding: {0} -viewer.dialog.fonts.info.substitution.type.label=Actual Type: {0} -viewer.dialog.fonts.info.substitution.path.label=Path: {0} -viewer.dialog.fonts.searching.label=Collecting font data ({0}%). -viewer.dialog.fonts.resetCache.label=Reset Cache -viewer.dialog.fonts.resetCache.tip=Reset font properties cache file and rescan system for new fonts. 
+viewer.dialog.fonts.title = Document Font Properties +viewer.dialog.fonts.border.label = Fonts used by this document +viewer.dialog.fonts.info.type.label = Type: {0} +viewer.dialog.fonts.info.encoding.label = Encoding: {0} +viewer.dialog.fonts.info.substitution.type.label = Actual Type: {0} +viewer.dialog.fonts.info.substitution.path.label = Path: {0} +viewer.dialog.fonts.searching.label = Collecting font data ({0}%). +viewer.dialog.fonts.resetCache.label = Reset Cache +viewer.dialog.fonts.resetCache.tip = Reset font properties cache file and rescan system for new fonts. ## Utility Pane Bookmarks Tab -viewer.utilityPane.bookmarks.tab.title=Bookmarks +viewer.utilityPane.bookmarks.tab.title = Bookmarks ## Utility Pane Bookmarks Tab -viewer.utilityPane.attachments.tab.title=Attachments -viewer.utilityPane.attachments.column.fileName.title=Name -viewer.utilityPane.attachments.column.description.title=Description -viewer.utilityPane.attachments.column.modified.title=Modified -viewer.utilityPane.attachments.column.size.title=Size -viewer.utilityPane.attachments.column.compressedSize.title=Compressed size -viewer.utilityPane.attachments.menu.saveAs.label=Save As... -viewer.utilityPane.attachments.saveAs.replace.title=ICEsoft ICEpdf - Save Error -viewer.utilityPane.attachments.saveAs.replace.msg=The file named {0} already exists. Do you want to replace It? +viewer.utilityPane.attachments.tab.title = Attachments +viewer.utilityPane.attachments.column.fileName.title = Name +viewer.utilityPane.attachments.column.description.title = Description +viewer.utilityPane.attachments.column.modified.title = Modified +viewer.utilityPane.attachments.column.size.title = Size +viewer.utilityPane.attachments.column.compressedSize.title = Compressed size +viewer.utilityPane.attachments.menu.saveAs.label = Save As... +viewer.utilityPane.attachments.saveAs.replace.title = ICEsoft ICEpdf - Save Error +viewer.utilityPane.attachments.saveAs.replace.msg = \ + The file named {0} already exists. 
Do you want to replace it? ## Utility Pane Thumbnails -viewer.utilityPane.thumbs.tab.title=Thumbnails +viewer.utilityPane.thumbs.tab.title = Thumbnails ## Layers Pane -viewer.utilityPane.layers.tab.title=Layers +viewer.utilityPane.layers.tab.title = Layers ## Signature Pane -viewer.utilityPane.signatures.tab.title=Signatures -viewer.utilityPane.signatures.tab.certTree.error.label=Unsigned Signature Fields Signer certificate could not be validated {0} {1} -viewer.utilityPane.signatures.tab.certTree.rootSigned.label=Signed by {0} {1} -viewer.utilityPane.signatures.tab.certTree.rootValidating.label=Validating signature {0} {1} -viewer.utilityPane.signatures.tab.certTree.cert.invalid.label=Signature is invalid: -viewer.utilityPane.signatures.tab.certTree.cert.unknown.label=Signature is valid: -viewer.utilityPane.signatures.tab.certTree.cert.valid.label=Signature validity is unknown: -viewer.utilityPane.signatures.tab.certTree.doc.modified.label=This version of the document is unaltered but subsequent changes have been made -viewer.utilityPane.signatures.tab.certTree.doc.unmodified.label=Document has not been modified since it was signed -viewer.utilityPane.signatures.tab.certTree.doc.major.label=Document has been altered or corrupted since it was signed -viewer.utilityPane.signatures.tab.certTree.signature.identity.unknown.label=Signer's identity is unknown because it could not be found in your keystore -viewer.utilityPane.signatures.tab.certTree.signature.identity.unchecked.label=Signature is valid, but revocation of the signer's identity could not be checked -viewer.utilityPane.signatures.tab.certTree.signature.identity.valid.label=Signer's identity is valid -viewer.utilityPane.signatures.tab.certTree.signature.time.local.label=Signing time is from the clock on this signer's computer -viewer.utilityPane.signatures.tab.certTree.signature.time.embedded.label=Signature included an embedded timestamp but it could not be validated
-viewer.utilityPane.signatures.tab.certTree.signature.details.label=Signature Details -viewer.utilityPane.signatures.tab.certTree.signature.details.reason.label=Reason: {0} -viewer.utilityPane.signatures.tab.certTree.signature.details.location.label=Location: {0} -viewer.utilityPane.signatures.tab.certTree.signature.details.full.label=Certificate Details... -viewer.utilityPane.signatures.tab.certTree.signature.lastChecked.label=Last Checked: {0} -viewer.utilityPane.signatures.tab.certTree.unsigned.label=Unsigned Signature Fields +viewer.utilityPane.signatures.tab.title = Signatures +viewer.utilityPane.signatures.tab.certTree.error.label = \ + Unsigned Signature Fields Signer certificate could not be validated {0} {1} +viewer.utilityPane.signatures.tab.certTree.rootSigned.label = Signed by {0} {1} +viewer.utilityPane.signatures.tab.certTree.rootValidating.label = Validating signature {0} {1} +viewer.utilityPane.signatures.tab.certTree.cert.invalid.label = Signature is invalid: +viewer.utilityPane.signatures.tab.certTree.cert.unknown.label = Signature validity is unknown: +viewer.utilityPane.signatures.tab.certTree.cert.valid.label = Signature is valid: +viewer.utilityPane.signatures.tab.certTree.doc.modified.label = \ + This version of the document is unaltered but subsequent changes have been made +viewer.utilityPane.signatures.tab.certTree.doc.unmodified.label = Document has not been modified since it was signed +viewer.utilityPane.signatures.tab.certTree.doc.major.label = Document has been altered or corrupted since it was signed +viewer.utilityPane.signatures.tab.certTree.signature.identity.unknown.label = \ + Signer's identity is unknown because it could not be found in your keystore +viewer.utilityPane.signatures.tab.certTree.signature.identity.unchecked.label = \ + Signature is valid, but revocation of the signer's identity could not be checked +viewer.utilityPane.signatures.tab.certTree.signature.identity.valid.label = Signer's identity is valid
+viewer.utilityPane.signatures.tab.certTree.signature.time.local.label = Signing time is from the clock on this signer's computer +viewer.utilityPane.signatures.tab.certTree.signature.time.embedded.label = \ + Signature included an embedded timestamp but it could not be validated +viewer.utilityPane.signatures.tab.certTree.signature.details.label = Signature Details +viewer.utilityPane.signatures.tab.certTree.signature.details.reason.label = Reason: {0} +viewer.utilityPane.signatures.tab.certTree.signature.details.location.label = Location: {0} +viewer.utilityPane.signatures.tab.certTree.signature.details.full.label = Certificate Details... +viewer.utilityPane.signatures.tab.certTree.signature.lastChecked.label = Last Checked: {0} +viewer.utilityPane.signatures.tab.certTree.unsigned.label = Unsigned Signature Fields ## Signature certificate view dialog. -viewer.utilityPane.signatures.cert.dialog.title=Certificate Details -viewer.utilityPane.signatures.cert.dialog.closeButton.label=Close -viewer.utilityPane.signatures.cert.dialog.closeButton.mnemonic=C -viewer.utilityPane.signatures.cert.dialog.info.notAvailable.label=N/A -viewer.utilityPane.signatures.cert.dialog.info.unknownSubject.label=N/A Subject -viewer.utilityPane.signatures.cert.dialog.info.unknownIssuer.label=N/A Issuer -viewer.utilityPane.signatures.cert.dialog.info.certificateInfo.label={0} - {1} -viewer.utilityPane.signatures.cert.dialog.info.column1.label=Field -viewer.utilityPane.signatures.cert.dialog.info.column2.label=Value -viewer.utilityPane.signatures.cert.dialog.info.version.label=Version -viewer.utilityPane.signatures.cert.dialog.info.version.value=v{0} -viewer.utilityPane.signatures.cert.dialog.info.serialNumber.label=Serial Number -viewer.utilityPane.signatures.cert.dialog.info.serialNumber.value={0} -viewer.utilityPane.signatures.cert.dialog.info.signatureAlgorithm.label=Signature Algorithm -viewer.utilityPane.signatures.cert.dialog.info.signatureAlgorithm.value={0} 
-viewer.utilityPane.signatures.cert.dialog.info.issuer.label=Issuer -viewer.utilityPane.signatures.cert.dialog.info.issuer.value=Organization: {0} \nOrganization Unit: {1} \nCommon Name: {2} \nLocal: {3} \nState: {4} \nCountry: {5} \nEmail: {6} -viewer.utilityPane.signatures.cert.dialog.info.validity.label=Validity -viewer.utilityPane.signatures.cert.dialog.info.validity.value=From: {0}\n To: {1} -viewer.utilityPane.signatures.cert.dialog.info.subject.label=Subject -viewer.utilityPane.signatures.cert.dialog.info.subject.value=Organization: {0} \nOrganization Unit: {1} \nCommon Name: {2} \nLocal: {3} \nState: {4} \nCountry: {5} \nEmail: {6} -viewer.utilityPane.signatures.cert.dialog.info.signature.label=Signature -viewer.utilityPane.signatures.cert.dialog.info.md5.label=MD5 Fingerprint -viewer.utilityPane.signatures.cert.dialog.info.md5.value={0} -viewer.utilityPane.signatures.cert.dialog.info.sha1.label=SHA1 Fingerprint -viewer.utilityPane.signatures.cert.dialog.info.sha1.value={0} -viewer.utilityPane.signatures.verify.initializingMessage.label=Validating {0} of {1} Signatures -viewer.utilityPane.signatures.verify.completeMessage.label=Validating process complete -viewer.utilityPane.signatures.verify.validating.label=Validating signature... 
+viewer.utilityPane.signatures.cert.dialog.title = Certificate Details +viewer.utilityPane.signatures.cert.dialog.closeButton.label = Close +viewer.utilityPane.signatures.cert.dialog.closeButton.mnemonic = C +viewer.utilityPane.signatures.cert.dialog.info.notAvailable.label = N/A +viewer.utilityPane.signatures.cert.dialog.info.unknownSubject.label = N/A Subject +viewer.utilityPane.signatures.cert.dialog.info.unknownIssuer.label = N/A Issuer +viewer.utilityPane.signatures.cert.dialog.info.certificateInfo.label = {0} - {1} +viewer.utilityPane.signatures.cert.dialog.info.column1.label = Field +viewer.utilityPane.signatures.cert.dialog.info.column2.label = Value +viewer.utilityPane.signatures.cert.dialog.info.version.label = Version +viewer.utilityPane.signatures.cert.dialog.info.version.value = v{0} +viewer.utilityPane.signatures.cert.dialog.info.serialNumber.label = Serial Number +viewer.utilityPane.signatures.cert.dialog.info.serialNumber.value = {0} +viewer.utilityPane.signatures.cert.dialog.info.signatureAlgorithm.label = Signature Algorithm +viewer.utilityPane.signatures.cert.dialog.info.signatureAlgorithm.value = {0} +viewer.utilityPane.signatures.cert.dialog.info.issuer.label = Issuer +viewer.utilityPane.signatures.cert.dialog.info.issuer.value = \ + Organization: {0} \nOrganization Unit: {1} \nCommon Name: {2} \nLocal: {3} \nState: {4} \nCountry: {5} \nEmail: {6} +viewer.utilityPane.signatures.cert.dialog.info.validity.label = Validity +viewer.utilityPane.signatures.cert.dialog.info.validity.value = From: {0}\n To: {1} +viewer.utilityPane.signatures.cert.dialog.info.subject.label = Subject +viewer.utilityPane.signatures.cert.dialog.info.subject.value = \ + Organization: {0} \nOrganization Unit: {1} \nCommon Name: {2} \nLocal: {3} \nState: {4} \nCountry: {5} \nEmail: {6} +viewer.utilityPane.signatures.cert.dialog.info.signature.label = Signature +viewer.utilityPane.signatures.cert.dialog.info.md5.label = MD5 Fingerprint 
+viewer.utilityPane.signatures.cert.dialog.info.md5.value = {0} +viewer.utilityPane.signatures.cert.dialog.info.sha1.label = SHA1 Fingerprint +viewer.utilityPane.signatures.cert.dialog.info.sha1.value = {0} +viewer.utilityPane.signatures.verify.initializingMessage.label = Validating {0} of {1} Signatures +viewer.utilityPane.signatures.verify.completeMessage.label = Validating process complete +viewer.utilityPane.signatures.verify.validating.label = Validating signature... ## Annotation Tab -viewer.utilityPane.annotation.tab.title=Annotations +viewer.utilityPane.annotation.tab.title = Annotations ## Utility Pane Annotation Link Tab -viewer.utilityPane.annotation.link.appearance.title=Link Annotation -viewer.utilityPane.annotation.link.highlightType=Highlight Style: -viewer.utilityPane.annotation.link.none=None -viewer.utilityPane.annotation.link.invert=Invert` -viewer.utilityPane.annotation.link.outline=Outline -viewer.utilityPane.annotation.link.push=Push +viewer.utilityPane.annotation.link.appearance.title = Link Annotation +viewer.utilityPane.annotation.link.highlightType = Highlight Style: +viewer.utilityPane.annotation.link.none = None +viewer.utilityPane.annotation.link.invert = Invert +viewer.utilityPane.annotation.link.outline = Outline +viewer.utilityPane.annotation.link.push = Push ## Utility Pane Annotation text markup Tab -viewer.utilityPane.annotation.textMarkup.appearance.title=Text Markup Annotation -viewer.utilityPane.annotation.textMarkup.highlightType=Type: -viewer.utilityPane.annotation.textMarkup.colorChooserTitle=MarKup Color -viewer.utilityPane.annotation.textMarkup.colorLabel=Color: +viewer.utilityPane.annotation.textMarkup.appearance.title = Text Markup Annotation +viewer.utilityPane.annotation.textMarkup.highlightType = Type: +viewer.utilityPane.annotation.textMarkup.colorChooserTitle = Markup Color +viewer.utilityPane.annotation.textMarkup.colorLabel = Color:
+viewer.utilityPane.annotation.textMarkup.transparencyLabel = Transparency: ## Utility Pane Annotation line Tab -viewer.utilityPane.annotation.line.appearance.title=Line Annotation -viewer.utilityPane.annotation.line.lineThickness=Line Thickness: -viewer.utilityPane.annotation.line.lineStyle=Line Style: -viewer.utilityPane.annotation.line.startStyle=Start: -viewer.utilityPane.annotation.line.endStyle=End: -viewer.utilityPane.annotation.line.colorChooserTitle=Line Color -viewer.utilityPane.annotation.line.colorInternalChooserTitle=Line Internal Color -viewer.utilityPane.annotation.line.colorLabel=Color: -viewer.utilityPane.annotation.line.colorInternalLabel=Fill Color: -viewer.utilityPane.annotation.line.end.none=None -viewer.utilityPane.annotation.line.end.openArrow=Open Arrow -viewer.utilityPane.annotation.line.end.closedArrow=Closed Arrow -viewer.utilityPane.annotation.line.end.diamond=Diamond -viewer.utilityPane.annotation.line.end.square=Square -viewer.utilityPane.annotation.line.end.circle=Circle -viewer.utilityPane.annotation.line.transparencyLabel=Transparency: +viewer.utilityPane.annotation.line.appearance.title = Line Annotation +viewer.utilityPane.annotation.line.lineThickness = Line Thickness: +viewer.utilityPane.annotation.line.lineStyle = Line Style: +viewer.utilityPane.annotation.line.startStyle = Start: +viewer.utilityPane.annotation.line.endStyle = End: +viewer.utilityPane.annotation.line.colorChooserTitle = Line Color +viewer.utilityPane.annotation.line.colorInternalChooserTitle = Line Internal Color +viewer.utilityPane.annotation.line.colorLabel = Color: +viewer.utilityPane.annotation.line.colorInternalLabel = Fill Color: +viewer.utilityPane.annotation.line.end.none = None +viewer.utilityPane.annotation.line.end.openArrow = Open Arrow +viewer.utilityPane.annotation.line.end.closedArrow = Closed Arrow +viewer.utilityPane.annotation.line.end.diamond = Diamond +viewer.utilityPane.annotation.line.end.square = Square 
+viewer.utilityPane.annotation.line.end.circle = Circle +viewer.utilityPane.annotation.line.transparencyLabel = Transparency: ## Utility Pane Annotation square Tab -viewer.utilityPane.annotation.square.appearance.title=Square Annotation -viewer.utilityPane.annotation.square.lineThickness=Border Thickness: -viewer.utilityPane.annotation.square.lineStyle=Border Style: -viewer.utilityPane.annotation.square.colorBorderChooserTitle=Border Color -viewer.utilityPane.annotation.square.colorInteriorChooserTitle=Fill Color -viewer.utilityPane.annotation.square.borderTypeLabel=Border Type: -viewer.utilityPane.annotation.square.colorBorderLabel=Border Color: -viewer.utilityPane.annotation.square.colorInteriorLabel=Fill Color: -viewer.utilityPane.annotation.square.fillTypeLabel=Fill Type: -viewer.utilityPane.annotation.square.transparencyLabel=Transparency: +viewer.utilityPane.annotation.square.appearance.title = Square Annotation +viewer.utilityPane.annotation.square.lineThickness = Border Thickness: +viewer.utilityPane.annotation.square.lineStyle = Border Style: +viewer.utilityPane.annotation.square.colorBorderChooserTitle = Border Color +viewer.utilityPane.annotation.square.colorInteriorChooserTitle = Fill Color +viewer.utilityPane.annotation.square.borderTypeLabel = Border Type: +viewer.utilityPane.annotation.square.colorBorderLabel = Border Color: +viewer.utilityPane.annotation.square.colorInteriorLabel = Fill Color: +viewer.utilityPane.annotation.square.fillTypeLabel = Fill Type: +viewer.utilityPane.annotation.square.transparencyLabel = Transparency: ## Utility Pane Annotation free text Tab -viewer.utilityPane.annotation.freeText.appearance.title=FreeText Annotation -viewer.utilityPane.annotation.freeText.font.name=Font Name: -viewer.utilityPane.annotation.freeText.font.style=Font Style: -viewer.utilityPane.annotation.freeText.font.size=Font Size: -viewer.utilityPane.annotation.freeText.font.color=Font Color: 
-viewer.utilityPane.annotation.freeText.font.color.ChooserTitle=Font Color -viewer.utilityPane.annotation.freeText.border.thickness=Border Thickness: -viewer.utilityPane.annotation.freeText.border.type=Border Type: -viewer.utilityPane.annotation.freeText.border.style=Border Style: -viewer.utilityPane.annotation.freeText.border.color=Border Color: -viewer.utilityPane.annotation.freeText.border.color.ChooserTitle=Border Color -viewer.utilityPane.annotation.freeText.fill.type=Fill Type: -viewer.utilityPane.annotation.freeText.fill.color=Fill Color: -viewer.utilityPane.annotation.freeText.transparencyLabel=Transparency: -viewer.utilityPane.annotation.freeText.fill.color.ChooserTitle=Fill Color -viewer.utilityPane.annotation.freeText.font.dialog=Dialog -viewer.utilityPane.annotation.freeText.font.dialogInput=DialogInput -viewer.utilityPane.annotation.freeText.font.monospaced=Monospaced -viewer.utilityPane.annotation.freeText.font.serif=Serif -viewer.utilityPane.annotation.freeText.font.sanSerif=SansSerif -viewer.utilityPane.annotation.freeText.font.style.plain=Plain -viewer.utilityPane.annotation.freeText.font.style.italic=Italic -viewer.utilityPane.annotation.freeText.font.style.bold=Bold -viewer.utilityPane.annotation.freeText.font.name.helvetica=Helvetica -viewer.utilityPane.annotation.freeText.font.name.helveticaOblique=Helvetica-Oblique -viewer.utilityPane.annotation.freeText.font.name.helveticaBold=Helvetica-Bold -viewer.utilityPane.annotation.freeText.font.name.HelveticaBoldOblique=Helvetica-BoldOblique -viewer.utilityPane.annotation.freeText.font.name.timesItalic=Times-Italic -viewer.utilityPane.annotation.freeText.font.name.timesBold=Times-Bold -viewer.utilityPane.annotation.freeText.font.name.timesBoldItalic=Times-BoldItalic -viewer.utilityPane.annotation.freeText.font.name.timesRoman=Times-Roman -viewer.utilityPane.annotation.freeText.font.name.courier=Courier -viewer.utilityPane.annotation.freeText.font.name.courierOblique=Courier-Oblique 
-viewer.utilityPane.annotation.freeText.font.name.courierBoldOblique=Courier-BoldOblique -viewer.utilityPane.annotation.freeText.font.name.courierBold=Courier-Bold +viewer.utilityPane.annotation.freeText.appearance.title = FreeText Annotation +viewer.utilityPane.annotation.freeText.font.name = Font Name: +viewer.utilityPane.annotation.freeText.font.style = Font Style: +viewer.utilityPane.annotation.freeText.font.size = Font Size: +viewer.utilityPane.annotation.freeText.font.color = Font Color: +viewer.utilityPane.annotation.freeText.font.color.ChooserTitle = Font Color +viewer.utilityPane.annotation.freeText.border.thickness = Border Thickness: +viewer.utilityPane.annotation.freeText.border.type = Border Type: +viewer.utilityPane.annotation.freeText.border.style = Border Style: +viewer.utilityPane.annotation.freeText.border.color = Border Color: +viewer.utilityPane.annotation.freeText.border.color.ChooserTitle = Border Color +viewer.utilityPane.annotation.freeText.fill.type = Fill Type: +viewer.utilityPane.annotation.freeText.fill.color = Fill Color: +viewer.utilityPane.annotation.freeText.transparencyLabel = Transparency: +viewer.utilityPane.annotation.freeText.fill.color.ChooserTitle = Fill Color +viewer.utilityPane.annotation.freeText.font.dialog = Dialog +viewer.utilityPane.annotation.freeText.font.dialogInput = DialogInput +viewer.utilityPane.annotation.freeText.font.monospaced = Monospaced +viewer.utilityPane.annotation.freeText.font.serif = Serif +viewer.utilityPane.annotation.freeText.font.sanSerif = SansSerif +viewer.utilityPane.annotation.freeText.font.style.plain = Plain +viewer.utilityPane.annotation.freeText.font.style.italic = Italic +viewer.utilityPane.annotation.freeText.font.style.bold = Bold +viewer.utilityPane.annotation.freeText.font.name.helvetica = Helvetica +viewer.utilityPane.annotation.freeText.font.name.helveticaOblique = Helvetica-Oblique +viewer.utilityPane.annotation.freeText.font.name.helveticaBold = Helvetica-Bold 
+viewer.utilityPane.annotation.freeText.font.name.HelveticaBoldOblique = Helvetica-BoldOblique +viewer.utilityPane.annotation.freeText.font.name.timesItalic = Times-Italic +viewer.utilityPane.annotation.freeText.font.name.timesBold = Times-Bold +viewer.utilityPane.annotation.freeText.font.name.timesBoldItalic = Times-BoldItalic +viewer.utilityPane.annotation.freeText.font.name.timesRoman = Times-Roman +viewer.utilityPane.annotation.freeText.font.name.courier = Courier +viewer.utilityPane.annotation.freeText.font.name.courierOblique = Courier-Oblique +viewer.utilityPane.annotation.freeText.font.name.courierBoldOblique = Courier-BoldOblique +viewer.utilityPane.annotation.freeText.font.name.courierBold = Courier-Bold ## Utility Pane Annotation text Tab -viewer.utilityPane.annotation.text.appearance.title=Text Annotation -viewer.utilityPane.annotation.text.iconName=Icon: -viewer.utilityPane.annotation.text.iconName.comment=Comment -viewer.utilityPane.annotation.text.iconName.check=Check -viewer.utilityPane.annotation.text.iconName.checkMark=CheckMark -viewer.utilityPane.annotation.text.iconName.circle=Circle -viewer.utilityPane.annotation.text.iconName.cross=Cross -viewer.utilityPane.annotation.text.iconName.crossHairs=CrossHairs -viewer.utilityPane.annotation.text.iconName.help=Help -viewer.utilityPane.annotation.text.iconName.insert=Insert -viewer.utilityPane.annotation.text.iconName.key=Key -viewer.utilityPane.annotation.text.iconName.newParagraph=NewParagraph -viewer.utilityPane.annotation.text.iconName.paragraph=Paragraph -viewer.utilityPane.annotation.text.iconName.rightArrow=RightArrow -viewer.utilityPane.annotation.text.iconName.rightPointer=RightPointer -viewer.utilityPane.annotation.text.iconName.star=Star -viewer.utilityPane.annotation.text.iconName.upArrow=UpArrow -viewer.utilityPane.annotation.text.iconName.upLeftArrow=UpLeftArrow +viewer.utilityPane.annotation.text.appearance.title = Text Annotation +viewer.utilityPane.annotation.text.iconName = Icon: 
+viewer.utilityPane.annotation.text.iconName.comment = Comment +viewer.utilityPane.annotation.text.iconName.check = Check +viewer.utilityPane.annotation.text.iconName.checkMark = CheckMark +viewer.utilityPane.annotation.text.iconName.circle = Circle +viewer.utilityPane.annotation.text.iconName.cross = Cross +viewer.utilityPane.annotation.text.iconName.crossHairs = CrossHairs +viewer.utilityPane.annotation.text.iconName.help = Help +viewer.utilityPane.annotation.text.iconName.insert = Insert +viewer.utilityPane.annotation.text.iconName.key = Key +viewer.utilityPane.annotation.text.iconName.newParagraph = NewParagraph +viewer.utilityPane.annotation.text.iconName.paragraph = Paragraph +viewer.utilityPane.annotation.text.iconName.rightArrow = RightArrow +viewer.utilityPane.annotation.text.iconName.rightPointer = RightPointer +viewer.utilityPane.annotation.text.iconName.star = Star +viewer.utilityPane.annotation.text.iconName.upArrow = UpArrow +viewer.utilityPane.annotation.text.iconName.upLeftArrow = UpLeftArrow ## Utility Pane Annotation circle Tab -viewer.utilityPane.annotation.circle.appearance.title=Circle Annotation -viewer.utilityPane.annotation.circle.lineThickness=Border Thickness: -viewer.utilityPane.annotation.circle.lineStyle=Border Style: -viewer.utilityPane.annotation.circle.colorBorderChooserTitle=Border Color -viewer.utilityPane.annotation.circle.colorInteriorChooserTitle=Interior Color -viewer.utilityPane.annotation.circle.colorBorderLabel=Border Color: -viewer.utilityPane.annotation.circle.colorInteriorLabel=Fill Color: -viewer.utilityPane.annotation.circle.fillTypeLabel=Fill Type: -viewer.utilityPane.annotation.circle.transparencyLabel=Transparency: +viewer.utilityPane.annotation.circle.appearance.title = Circle Annotation +viewer.utilityPane.annotation.circle.lineThickness = Border Thickness: +viewer.utilityPane.annotation.circle.lineStyle = Border Style: +viewer.utilityPane.annotation.circle.colorBorderChooserTitle = Border Color 
+viewer.utilityPane.annotation.circle.colorInteriorChooserTitle = Interior Color +viewer.utilityPane.annotation.circle.colorBorderLabel = Border Color: +viewer.utilityPane.annotation.circle.colorInteriorLabel = Fill Color: +viewer.utilityPane.annotation.circle.fillTypeLabel = Fill Type: +viewer.utilityPane.annotation.circle.transparencyLabel = Transparency: ## Utility Pane Annotation ink Tab -viewer.utilityPane.annotation.ink.appearance.title=Ink Annotation -viewer.utilityPane.annotation.ink.lineThickness=Ink Thickness: -viewer.utilityPane.annotation.ink.lineStyle=Ink Style: -viewer.utilityPane.annotation.ink.colorBorderChooserTitle=Ink Color -viewer.utilityPane.annotation.ink.colorBorderLabel=Ink Color: -viewer.utilityPane.annotation.ink.transparencyLabel=Transparency: +viewer.utilityPane.annotation.ink.appearance.title = Ink Annotation +viewer.utilityPane.annotation.ink.lineThickness = Ink Thickness: +viewer.utilityPane.annotation.ink.lineStyle = Ink Style: +viewer.utilityPane.annotation.ink.colorBorderChooserTitle = Ink Color +viewer.utilityPane.annotation.ink.colorBorderLabel = Ink Color: +viewer.utilityPane.annotation.ink.transparencyLabel = Transparency: ## Utility Pane border Tab -viewer.utilityPane.annotation.border.title=Border -viewer.utilityPane.annotation.border.linkType=Border Type: -viewer.utilityPane.annotation.border.lineThickness=Border Thickness: -viewer.utilityPane.annotation.border.lineStyle=Border Style: -viewer.utilityPane.annotation.border.colorChooserTitle=Border Color -viewer.utilityPane.annotation.border.colorLabel=Color: -viewer.utilityPane.annotation.border.borderType.visibleRectangle=Visible -viewer.utilityPane.annotation.border.borderType.invisibleRectangle=Invisible -viewer.utilityPane.annotation.border.solid=Solid -viewer.utilityPane.annotation.border.dashed=Dashed -viewer.utilityPane.annotation.border.beveled=Beveled -viewer.utilityPane.annotation.border.inset=Inset -viewer.utilityPane.annotation.border.underline=Underline 
+viewer.utilityPane.annotation.border.title = Border +viewer.utilityPane.annotation.border.linkType = Border Type: +viewer.utilityPane.annotation.border.lineThickness = Border Thickness: +viewer.utilityPane.annotation.border.lineStyle = Border Style: +viewer.utilityPane.annotation.border.colorChooserTitle = Border Color +viewer.utilityPane.annotation.border.colorLabel = Color: +viewer.utilityPane.annotation.border.borderType.visibleRectangle = Visible +viewer.utilityPane.annotation.border.borderType.invisibleRectangle = Invisible +viewer.utilityPane.annotation.border.solid = Solid +viewer.utilityPane.annotation.border.dashed = Dashed +viewer.utilityPane.annotation.border.beveled = Beveled +viewer.utilityPane.annotation.border.inset = Inset +viewer.utilityPane.annotation.border.underline = Underline ## Utility Pane border Tab -viewer.utilityPane.annotation.flags.title=Flags -viewer.utilityPane.annotation.flags.noRotate=No Rotate: -viewer.utilityPane.annotation.flags.noZoom=No Zoom: -viewer.utilityPane.annotation.flags.readOnly=Read Only: -viewer.utilityPane.annotation.flags.printable=Printable: -viewer.utilityPane.annotation.flags.yes=Printable: -viewer.utilityPane.annotation.flags.enabled=Enabled -viewer.utilityPane.annotation.flags.disabled=Disabled +viewer.utilityPane.annotation.flags.title = Flags +viewer.utilityPane.annotation.flags.noRotate = No Rotate: +viewer.utilityPane.annotation.flags.noZoom = No Zoom: +viewer.utilityPane.annotation.flags.readOnly = Read Only: +viewer.utilityPane.annotation.flags.printable = Printable: +viewer.utilityPane.annotation.flags.yes = Printable: +viewer.utilityPane.annotation.flags.enabled = Enabled +viewer.utilityPane.annotation.flags.disabled = Disabled ## annotation action pane and dialogs. 
-viewer.utilityPane.action.selectionTitle=Action -viewer.utilityPane.action.addAction=Add -viewer.utilityPane.action.editAction=Edit -viewer.utilityPane.action.removeAction=Remove -viewer.utilityPane.action.type.destination.label=Destination -viewer.utilityPane.action.type.uriAction.label=URI Action -viewer.utilityPane.action.type.goToAction.label=GoTo Action -viewer.utilityPane.action.type.launchAction.label=Launch Action -viewer.utilityPane.action.dialog.new.title=Add New Action -viewer.utilityPane.action.dialog.new.msgs=Action Type: -viewer.utilityPane.action.dialog.delete.title=Delete Confirmation -viewer.utilityPane.action.dialog.delete.msgs=Are you sure your want to delete this action? +viewer.utilityPane.action.selectionTitle = Action +viewer.utilityPane.action.addAction = Add +viewer.utilityPane.action.editAction = Edit +viewer.utilityPane.action.removeAction = Remove +viewer.utilityPane.action.type.destination.label = Destination +viewer.utilityPane.action.type.uriAction.label = URI Action +viewer.utilityPane.action.type.goToAction.label = GoTo Action +viewer.utilityPane.action.type.launchAction.label = Launch Action +viewer.utilityPane.action.dialog.new.title = Add New Action +viewer.utilityPane.action.dialog.new.msgs = Action Type: +viewer.utilityPane.action.dialog.delete.title = Delete Confirmation +viewer.utilityPane.action.dialog.delete.msgs = Are you sure your want to delete this action? 
## uri action dialog test -viewer.utilityPane.action.dialog.uri.title=URI Action Properties -viewer.utilityPane.action.dialog.uri.msgs=URI: +viewer.utilityPane.action.dialog.uri.title = URI Action Properties +viewer.utilityPane.action.dialog.uri.msgs = URI: ## launch action dialog test -viewer.utilityPane.action.dialog.launch.title=Launch Action Properties -viewer.utilityPane.action.dialog.launch.msgs=File Path: +viewer.utilityPane.action.dialog.launch.title = Launch Action Properties +viewer.utilityPane.action.dialog.launch.msgs = File Path: ## GoTo action dialog text -viewer.utilityPane.action.dialog.goto.title=GoTo Action Properties -viewer.utilityPane.action.dialog.goto.page.label=Page: -viewer.utilityPane.action.dialog.goto.type.label=Type -viewer.utilityPane.action.dialog.goto.type.xyz.label=Absolute -viewer.utilityPane.action.dialog.goto.type.fit.label=Fit Page -viewer.utilityPane.action.dialog.goto.type.fith.label=Fit Top Width -viewer.utilityPane.action.dialog.goto.type.fitv.label=Fit Left Width -viewer.utilityPane.action.dialog.goto.type.fitr.label=Fit Zoom Box -viewer.utilityPane.action.dialog.goto.type.fitb.label=Fit Page Bounds -viewer.utilityPane.action.dialog.goto.type.fitbh.label=Fit Bounds Top -viewer.utilityPane.action.dialog.goto.type.fitbv.label=Fit Bounds Left -viewer.utilityPane.action.dialog.goto.right.label=Right: -viewer.utilityPane.action.dialog.goto.left.label=Left: -viewer.utilityPane.action.dialog.goto.top.label=Top: -viewer.utilityPane.action.dialog.goto.bottom.label=Bottom: -viewer.utilityPane.action.dialog.goto.zoom.label=Zoom: -viewer.utilityPane.action.dialog.goto.unassigned.label=NaN -viewer.utilityPane.action.dialog.goto.current.label=Current View: -viewer.utilityPane.action.dialog.goto.current=Set Location -viewer.utilityPane.action.dialog.goto.name.label=Name: -viewer.utilityPane.action.dialog.goto.browse=Browse... 
-viewer.utilityPane.action.dialog.goto.explicitDestination.title=Implicit Destination -viewer.utilityPane.action.dialog.goto.nameDestination.title=Named Destination +viewer.utilityPane.action.dialog.goto.title = GoTo Action Properties +viewer.utilityPane.action.dialog.goto.page.label = Page: +viewer.utilityPane.action.dialog.goto.type.label = Type +viewer.utilityPane.action.dialog.goto.type.xyz.label = Absolute +viewer.utilityPane.action.dialog.goto.type.fit.label = Fit Page +viewer.utilityPane.action.dialog.goto.type.fith.label = Fit Top Width +viewer.utilityPane.action.dialog.goto.type.fitv.label = Fit Left Width +viewer.utilityPane.action.dialog.goto.type.fitr.label = Fit Zoom Box +viewer.utilityPane.action.dialog.goto.type.fitb.label = Fit Page Bounds +viewer.utilityPane.action.dialog.goto.type.fitbh.label = Fit Bounds Top +viewer.utilityPane.action.dialog.goto.type.fitbv.label = Fit Bounds Left +viewer.utilityPane.action.dialog.goto.right.label = Right: +viewer.utilityPane.action.dialog.goto.left.label = Left: +viewer.utilityPane.action.dialog.goto.top.label = Top: +viewer.utilityPane.action.dialog.goto.bottom.label = Bottom: +viewer.utilityPane.action.dialog.goto.zoom.label = Zoom: +viewer.utilityPane.action.dialog.goto.unassigned.label = NaN +viewer.utilityPane.action.dialog.goto.current.label = Current View: +viewer.utilityPane.action.dialog.goto.current = Set Location +viewer.utilityPane.action.dialog.goto.name.label = Name: +viewer.utilityPane.action.dialog.goto.browse = Browse... 
+viewer.utilityPane.action.dialog.goto.explicitDestination.title = Implicit Destination +viewer.utilityPane.action.dialog.goto.nameDestination.title = Named Destination # Destination Named Tree -viewer.utilityPane.action.dialog.goto.nameTree.title=Document Name Tree -viewer.utilityPane.action.dialog.goto.nameTree.root.label=Name Tree -viewer.utilityPane.action.dialog.goto.nameTree.branch.label={0} to {1} +viewer.utilityPane.action.dialog.goto.nameTree.title = Document Name Tree +viewer.utilityPane.action.dialog.goto.nameTree.root.label = Name Tree +viewer.utilityPane.action.dialog.goto.nameTree.branch.label = {0} to {1} ## Utility Pane Search Tab -viewer.utilityPane.search.tab.title=Search -viewer.utilityPane.search.searchText.label=Search Text: -viewer.utilityPane.search.results.label=Results: -viewer.utilityPane.search.searchButton.label=Search -viewer.utilityPane.search.clearSearchButton.label=Clear -viewer.utilityPane.search.caseSenstiveCheckbox.label=Case-sensitive -viewer.utilityPane.search.wholeWordCheckbox.label=Whole words only -viewer.utilityPane.search.cumlitiveCheckbox.label=Cumulative -viewer.utilityPane.search.showPagesCheckbox.label=Show Pages -viewer.utilityPane.search.stopButton.label=Stop -viewer.utilityPane.search.searching.msg=Search... +viewer.utilityPane.search.tab.title = Search +viewer.utilityPane.search.searchText.label = Search Text: +viewer.utilityPane.search.results.label = Results: +viewer.utilityPane.search.searchButton.label = Search +viewer.utilityPane.search.clearSearchButton.label = Clear +viewer.utilityPane.search.caseSenstiveCheckbox.label = Case-sensitive +viewer.utilityPane.search.wholeWordCheckbox.label = Whole words only +viewer.utilityPane.search.cumlitiveCheckbox.label = Cumulative +viewer.utilityPane.search.showPagesCheckbox.label = Show Pages +viewer.utilityPane.search.stopButton.label = Stop +viewer.utilityPane.search.searching.msg = Search... 
# Searching x out of y page(s) -viewer.utilityPane.search.searching1.msg=Searching {0} out of {1} -viewer.utilityPane.search.searching1.oneFile.msg={2} page -viewer.utilityPane.search.searching1.moreFile.msg={2} pages +viewer.utilityPane.search.searching1.msg = \ + Searching {0} out of {1} +viewer.utilityPane.search.searching1.oneFile.msg = {2} page +viewer.utilityPane.search.searching1.moreFile.msg = {2} pages # Page x (y result(s)) -viewer.utilityPane.search.result.msg=Page {0} ({1}) -viewer.utilityPane.search.result.oneFile.msg={2} result -viewer.utilityPane.search.result.moreFile.msg={2} results +viewer.utilityPane.search.result.msg = Page {0} ({1}) +viewer.utilityPane.search.result.oneFile.msg = {2} result +viewer.utilityPane.search.result.moreFile.msg = {2} results # Searched x page(s) (y matches) -viewer.utilityPane.search.progress.msg=Searched {0} {1} ({2}) -viewer.utilityPane.search.progress.onePage.msg=page -viewer.utilityPane.search.progress.morePage.msg=pages -viewer.utilityPane.search.progress.oneMatch.msg={2} match -viewer.utilityPane.search.progress.moreMatch.msg={2} matches +viewer.utilityPane.search.progress.msg = \ + Searched {0} {1} ({2}) +viewer.utilityPane.search.progress.onePage.msg = page +viewer.utilityPane.search.progress.morePage.msg = pages +viewer.utilityPane.search.progress.oneMatch.msg = {2} match +viewer.utilityPane.search.progress.moreMatch.msg = {2} matches ## Popup Annotation component -viewer.annotation.popup.reply.label=Reply -viewer.annotation.popup.delete.label=Delete -viewer.annotation.popup.status.label=Set Status -viewer.annotation.popup.status.accepted.label=Accepted -viewer.annotation.popup.status.cancelled.label=Cancelled -viewer.annotation.popup.status.completed.label=Completed -viewer.annotation.popup.status.rejected.label=Rejected -viewer.annotation.popup.status.none.label=None -viewer.annotation.popup.openAll.label=Open all Popups -viewer.annotation.popup.minimizeAll.label=Minimize Popups 
-viewer.annotation.popup.replyTo.label=Re: {0} -viewer.annotation.popup.status.none.title=None: {0} -viewer.annotation.popup.status.none.msg=None set by {0} -viewer.annotation.popup.status.accepted.title=Accepted: {0} -viewer.annotation.popup.status.accepted.msg=Accepted set by {0} -viewer.annotation.popup.status.cancelled.title=Cancelled: {0} -viewer.annotation.popup.status.cancelled.msg=Cancelled set by {0} -viewer.annotation.popup.status.completed.title=Completed: {0} -viewer.annotation.popup.status.completed.msg=Completed set by {0} -viewer.annotation.popup.status.rejected.title=Rejected: {0} -viewer.annotation.popup.status.rejected.msg=Rejected set by {0} +viewer.annotation.popup.reply.label = Reply +viewer.annotation.popup.delete.label = Delete +viewer.annotation.popup.status.label = Set Status +viewer.annotation.popup.status.accepted.label = Accepted +viewer.annotation.popup.status.cancelled.label = Cancelled +viewer.annotation.popup.status.completed.label = Completed +viewer.annotation.popup.status.rejected.label = Rejected +viewer.annotation.popup.status.none.label = None +viewer.annotation.popup.openAll.label = Open all Popups +viewer.annotation.popup.minimizeAll.label = Minimize Popups +viewer.annotation.popup.replyTo.label = Re: {0} +viewer.annotation.popup.status.none.title = None: {0} +viewer.annotation.popup.status.none.msg = None set by {0} +viewer.annotation.popup.status.accepted.title = Accepted: {0} +viewer.annotation.popup.status.accepted.msg = Accepted set by {0} +viewer.annotation.popup.status.cancelled.title = Cancelled: {0} +viewer.annotation.popup.status.cancelled.msg = Cancelled set by {0} +viewer.annotation.popup.status.completed.title = Completed: {0} +viewer.annotation.popup.status.completed.msg = Completed set by {0} +viewer.annotation.popup.status.rejected.title = Rejected: {0} +viewer.annotation.popup.status.rejected.msg = Rejected set by {0} ## Signature component -viewer.annotation.signature.menu.validateSignature.label=Validate 
Signature -viewer.annotation.signature.menu.showCertificates.label=Show Certificate Properties -viewer.annotation.signature.menu.signatureProperties.label=Show Signature Properties -viewer.annotation.signature.menu.signaturePageNavigation.label=Go to Page... +viewer.annotation.signature.menu.validateSignature.label = Validate Signature +viewer.annotation.signature.menu.showCertificates.label = Show Certificate Properties +viewer.annotation.signature.menu.signatureProperties.label = Show Signature Properties +viewer.annotation.signature.menu.signaturePageNavigation.label = Go to Page... ## Signature validation dialog. -viewer.annotation.signature.validation.dialog.title=Signature Validation Summary -viewer.annotation.signature.validation.dialog.close.button.label=Close -viewer.annotation.signature.validation.dialog.signerProperties.button.label=Signature Properties... +viewer.annotation.signature.validation.dialog.title = Signature Validation Summary +viewer.annotation.signature.validation.dialog.close.button.label = Close +viewer.annotation.signature.validation.dialog.signerProperties.button.label = Signature Properties... 
# common validation messages -viewer.annotation.signature.validation.common.invalid.label=Signature is invalid: -viewer.annotation.signature.validation.common.unknown.label=Signature is valid: -viewer.annotation.signature.validation.common.valid.label=Signature validity is unknown: -viewer.annotation.signature.validation.common.signedBy.label=- Signed by {0} {1} -viewer.annotation.signature.validation.common.doc.modified.label=- This version of the document is unaltered but subsequent changes have been made -viewer.annotation.signature.validation.common.doc.unmodified.label=- Document has not been modified since it was signed -viewer.annotation.signature.validation.common.doc.major.label=- Document has been altered or corrupted since it was signed -viewer.annotation.signature.validation.common.identity.unknown.label=- Signer's identity is unknown because it could not be found in your keystore -viewer.annotation.signature.validation.common.identity.unchecked.label=- Signature is valid, but revocation of the signer's identity could not be checked -viewer.annotation.signature.validation.common.identity.valid.label=- Signer's identity is valid -viewer.annotation.signature.validation.common.time.local.label=- Signing time is from the clock on this signer's computer -viewer.annotation.signature.validation.common.time.embedded.label=- Signature included an embedded timestamp but it could not be validated -viewer.annotation.signature.validation.common.notAvailable.label=N/A +viewer.annotation.signature.validation.common.invalid.label = Signature is invalid: +viewer.annotation.signature.validation.common.unknown.label = Signature is valid: +viewer.annotation.signature.validation.common.valid.label = Signature validity is unknown: +viewer.annotation.signature.validation.common.signedBy.label = - Signed by {0} {1} +viewer.annotation.signature.validation.common.doc.modified.label = \ + - This version of the document is unaltered but subsequent changes have been made 
+viewer.annotation.signature.validation.common.doc.unmodified.label = - Document has not been modified since it was signed +viewer.annotation.signature.validation.common.doc.major.label = - Document has been altered or corrupted since it was signed +viewer.annotation.signature.validation.common.identity.unknown.label = \ + - Signer's identity is unknown because it could not be found in your keystore +viewer.annotation.signature.validation.common.identity.unchecked.label = \ + - Signature is valid, but revocation of the signer's identity could not be checked +viewer.annotation.signature.validation.common.identity.valid.label = - Signer's identity is valid +viewer.annotation.signature.validation.common.time.local.label = - Signing time is from the clock on this signer's computer +viewer.annotation.signature.validation.common.time.embedded.label = \ + - Signature included an embedded timestamp but it could not be validated +viewer.annotation.signature.validation.common.notAvailable.label = N/A ## Signatures properties Dialog. 
-viewer.annotation.signature.properties.dialog.title=Signature Properties -viewer.annotation.signature.properties.dialog.invalid.label=Signature is invalid -viewer.annotation.signature.properties.dialog.unknown.label=Signature is valid -viewer.annotation.signature.properties.dialog.valid.label=Signature validity is unknown -viewer.annotation.signature.properties.dialog.signedBy.label=Signed by {0} {1} -viewer.annotation.signature.properties.dialog.signingTime.label=Signed time: {0} -viewer.annotation.signature.properties.dialog.reason.label=Reason: {0} -viewer.annotation.signature.properties.dialog.location.label=Location: {0} +viewer.annotation.signature.properties.dialog.title = Signature Properties +viewer.annotation.signature.properties.dialog.invalid.label = Signature is invalid +viewer.annotation.signature.properties.dialog.unknown.label = Signature is valid +viewer.annotation.signature.properties.dialog.valid.label = Signature validity is unknown +viewer.annotation.signature.properties.dialog.signedBy.label = Signed by {0} {1} +viewer.annotation.signature.properties.dialog.signingTime.label = Signed time: {0} +viewer.annotation.signature.properties.dialog.reason.label = Reason: {0} +viewer.annotation.signature.properties.dialog.location.label = Location: {0} # SignatureSigner Info -viewer.annotation.signature.properties.dialog.pathValidation.success=- Path validation checks were successful. -viewer.annotation.signature.properties.dialog.pathValidation.failure=- Path validation checks were unsuccessful. -viewer.annotation.signature.properties.dialog.revocation.success=- Signer's certificate is valid and has not been revoked. -viewer.annotation.signature.properties.dialog.revocation.failure=- Revocation checking was not performed. -viewer.annotation.signature.properties.dialog.certificateExpired.failure=- Signer certificate has expired. -viewer.annotation.signature.properties.dialog.showCertificates.label=Signer's Certificate... 
-viewer.annotation.signature.properties.dialog.validity.title=Validity Summary -viewer.annotation.signature.properties.dialog.signerInfo.title=Signer Info +viewer.annotation.signature.properties.dialog.pathValidation.success = - Path validation checks were successful. +viewer.annotation.signature.properties.dialog.pathValidation.failure = - Path validation checks were unsuccessful. +viewer.annotation.signature.properties.dialog.revocation.success = - Signer's certificate is valid and has not been revoked. +viewer.annotation.signature.properties.dialog.revocation.failure = - Revocation checking was not performed. +viewer.annotation.signature.properties.dialog.certificateExpired.failure = - Signer certificate has expired. +viewer.annotation.signature.properties.dialog.showCertificates.label = Signer's Certificate... +viewer.annotation.signature.properties.dialog.validity.title = Validity Summary +viewer.annotation.signature.properties.dialog.signerInfo.title = Signer Info ## Common Button Labels -viewer.button.ok.label=Ok -viewer.button.ok.mnemonic=O -viewer.button.cancel.label=Cancel -viewer.button.cancel.mnemonic=C +viewer.button.ok.label = Ok +viewer.button.ok.mnemonic = O +viewer.button.cancel.label = Cancel +viewer.button.cancel.mnemonic = C ## Pilot Specific Mesages -pilot.title=ICEbrowser - ICEpdf Pilot Errror -pilot.loading.msg=Opening document {0} ... -pilot.display.msg=Displaying {0} -pilot.loading.error.msg=PDF Pilot: Failed to load {0}. -pilot.error.classLoading=Required class {0} not found. Required library 'icepdf.jar' may not be on the classpath - PDF Pilot disabled."; +pilot.title = ICEbrowser - ICEpdf Pilot Errror +pilot.loading.msg =Opening document {0} ... +pilot.display.msg = Displaying {0} +pilot.loading.error.msg = PDF Pilot: Failed to load {0}. +pilot.error.classLoading = Required class {0} not found. 
Required library \ + 'icepdf.jar' may not be on the classpath - PDF Pilot disabled."; ### # General Error Messages # Command Line Errors -viewer.commandLin.error=Usage: java org.icepdf.ri.viewer.Main [-loadfile ] [-loadurl ] +viewer.commandLin.error = \ + Usage: java org.icepdf.ri.viewer.Main [-loadfile ] [-loadurl ] # Launcher errors -viewer.launcher.URLError.dialog.title=ICEsoft ICEpdf -viewer.launcher.URLError.dialog.message=ICEpdf could not open the specified file. {0} at URL: {1}. -viewer.launcher.lookAndFeel.error.message=The specified look-and-feel ({0}) is not accessible from this platform. +viewer.launcher.URLError.dialog.title =ICEsoft ICEpdf +viewer.launcher.URLError.dialog.message = ICEpdf could not open the specified file. {0} at URL: {1}. +viewer.launcher.lookAndFeel.error.message = The specified look-and-feel ({0}) is not accessible from this platform. # Pilot Loading Errors ### parser error dialogs -parse.title=Properties Parsing Error -parse.integer=Warning : {0} is not a correct integer. -parse.float=Warning : {0} is not a correct float. -parse.double=Warning : {0} is not a correct double. -parse.choice=Warning : {0} is not a valid choice. -parse.laf=Warning : look-and-feel {0} is not supported. +parse.title = Properties Parsing Error +parse.integer = Warning : {0} is not a correct integer. +parse.float = Warning : {0} is not a correct float. +parse.double = Warning : {0} is not a correct double. +parse.choice = Warning : {0} is not a valid choice. +parse.laf = Warning : look-and-feel {0} is not supported. 
### Properties Manager Errors -manager.properties.title=ICEpdf Properties Manager -fontManager.properties.title=ICEpdf Font Manager +manager.properties.title = ICEpdf Properties Manager +fontManager.properties.title = ICEpdf Font Manager -manager.properties.createNewDirectory=To create the directory {0},\nwhere the ICEpdf Viewer will store changes to its setup, click Yes.\n\nIf you click "No", all changes you make to the ICEpdf Viewer setup\nwill be lost when you quit the application. \n\n +manager.properties.createNewDirectory = \ + To create the directory {0},\n\ + where the ICEpdf Viewer will store changes to its setup, click Yes.\n\n\ + If you click "No", all changes you make to the ICEpdf Viewer setup\n\ + will be lost when you quit the application. \n\n -manager.properties.failedCreation=ICEpdf Viewer directory to store user data can not be created:\n{0}\nICEpdf Viewer will not save changes to its default setup. +manager.properties.failedCreation = \ + ICEpdf Viewer directory to store user data can not be created:\n\ + {0}\n\ + ICEpdf Viewer will not save changes to its default setup. -manager.properties.session.nolock=Error creating the lock file :\n{0}\n +manager.properties.session.nolock = \ + Error creating the lock file :\n\ + {0}\n -manager.properties.session.readError=Error loading properties file: \n{0} +manager.properties.session.readError = \ + Error loading properties file: \n\ + {0} -manager.properties.deleted=Property file has been deleted\n({0})\nRecreate it ? +manager.properties.deleted = Property file has been deleted\n\ + ({0})\n\ + Recreate it ? -manager.properties.modified=Property file has been modified since last update\n({0,date,long})\nWould you like to merge changes in the file with the current properties? +manager.properties.modified = Property file has been modified since last update\n\ +({0,date,long})\n\ +Would you like to merge changes in the file with the current properties? 
-manager.properties.saveError=Impossible to save property file.\nEncountered the folowing error :\n{0} +manager.properties.saveError = Impossible to save property file.\n\ +Encountered the folowing error :\n\ +{0} -manager.properties.lafError=Look&Feel {0} given in the default properties is unsupported.\nUsing system default. +manager.properties.lafError =\ + Look&Feel {0} given in the default properties is unsupported.\n\ + Using system default. -manager.properties.brokenProperty=Broken default property {0} value: {1} +manager.properties.brokenProperty = Broken default property {0} value: {1} -manager.properties.missingProperty=Missing default property {0} value: {1} +manager.properties.missingProperty = Missing default property {0} value: {1} diff --git a/Core/src/org/sleuthkit/autopsy/core/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/core/Bundle.properties-MERGED index 51f1208f61..1d50092e80 100755 --- a/Core/src/org/sleuthkit/autopsy/core/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/core/Bundle.properties-MERGED @@ -3,7 +3,13 @@ Installer.closing.confirmationDialog.title=Ingest is Running # {0} - exception message Installer.closing.messageBox.caseCloseExceptionMessage=Error closing case: {0} OpenIDE-Module-Display-Category=Infrastructure -OpenIDE-Module-Long-Description=This is the core Autopsy module.\n\nThe module contains the core components needed for the bare application to run; the RCP platform, windowing GUI, sleuthkit bindings, datamodel / storage, explorer, result viewers, content viewers, ingest framework, reporting, and core tools, such as the file search.\n\nThe framework included in the module contains APIs for developing modules for ingest, viewers and reporting. 
The modules can be deployed as Plugins using the Autopsy plugin installer.\nThis module should not be uninstalled - without it, Autopsy will not run.\n\nFor more information, see http://www.sleuthkit.org/autopsy/ +OpenIDE-Module-Long-Description=\ + This is the core Autopsy module.\n\n\ + The module contains the core components needed for the bare application to run; the RCP platform, windowing GUI, sleuthkit bindings, datamodel / storage, explorer, result viewers, content viewers, ingest framework, reporting, and core tools, such as the file search.\n\n\ + The framework included in the module contains APIs for developing modules for ingest, viewers and reporting. \ + The modules can be deployed as Plugins using the Autopsy plugin installer.\n\ + This module should not be uninstalled - without it, Autopsy will not run.\n\n\ + For more information, see http://www.sleuthkit.org/autopsy/ OpenIDE-Module-Name=Autopsy-Core OpenIDE-Module-Short-Description=Autopsy Core Module org_sleuthkit_autopsy_core_update_center=http://sleuthkit.org/autopsy/updates.xml diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties-MERGED index 609d68bcd1..0636340b0b 100755 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties-MERGED @@ -75,9 +75,9 @@ DataContentViewerHex.totalPageLabel.text_1=100 DataContentViewerHex.pageLabel2.text=Page # Product Information panel -LBL_Description=
\n Product Version: {0} ({9})
Sleuth Kit Version: {7}
Netbeans RCP Build: {8}
Java: {1}; {2}
System: {3}; {4}; {5}
Userdir: {6}
+LBL_Description=
\n Product Version: {0} ({9})
Sleuth Kit Version: {7}
Netbeans RCP Build: {8}
Java: {1}; {2}
System: {3}; {4}; {5}
Userdir: {6}
Format_OperatingSystem_Value={0} version {1} running on {2} -LBL_Copyright=
Autopsy™ is a digital forensics platform based on The Sleuth Kit™ and other tools.
Copyright © 2003-2020.
+LBL_Copyright=
Autopsy™ is a digital forensics platform based on The Sleuth Kit™ and other tools.
Copyright © 2003-2020.
SortChooser.dialogTitle=Choose Sort Criteria ThumbnailViewChildren.progress.cancelling=(Cancelling) # {0} - file name @@ -105,7 +105,7 @@ DataResultViewerThumbnail.pageNextButton.text= DataResultViewerThumbnail.imagesLabel.text=Images: DataResultViewerThumbnail.imagesRangeLabel.text=- DataResultViewerThumbnail.pageNumLabel.text=- -DataResultViewerThumbnail.filePathLabel.text=\ +DataResultViewerThumbnail.filePathLabel.text=\ \ \ DataResultViewerThumbnail.goToPageLabel.text=Go to Page: DataResultViewerThumbnail.goToPageField.text= AdvancedConfigurationDialog.cancelButton.text=Cancel diff --git a/Core/src/org/sleuthkit/autopsy/coreutils/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/coreutils/Bundle.properties-MERGED index 18e279dd2c..a0d535f8e6 100755 --- a/Core/src/org/sleuthkit/autopsy/coreutils/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/coreutils/Bundle.properties-MERGED @@ -30,7 +30,9 @@ PlatformUtil.getProcVmUsed.sigarNotInit.msg=Cannot get virt mem used, sigar not PlatformUtil.getProcVmUsed.gen.msg=Cannot get virt mem used, {0} PlatformUtil.getJvmMemInfo.usageText=JVM heap usage: {0}, JVM non-heap usage: {1} PlatformUtil.getPhysicalMemInfo.usageText=Physical memory usage (max, total, free): {0}, {1}, {2} -PlatformUtil.getAllMemUsageInfo.usageText={0}\n{1}\nProcess Virtual Memory: {2} +PlatformUtil.getAllMemUsageInfo.usageText={0}\n\ +{1}\n\ +Process Virtual Memory: {2} # {0} - file name ReadImageTask.mesageText=Reading image: {0} StringExtract.illegalStateException.cannotInit.msg=Unicode table not properly initialized, cannot instantiate StringExtract diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/datamodel/Bundle.properties-MERGED index 19a399957a..d753a6e329 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/datamodel/Bundle.properties-MERGED @@ -313,10 +313,10 @@ 
ImageNode.getActions.viewInNewWin.text=View in New Window ImageNode.createSheet.name.name=Name ImageNode.createSheet.name.displayName=Name ImageNode.createSheet.name.desc=no description -Installer.exception.tskVerStringNull.msg=Sleuth Kit JNI test call returned without error, but version string was null! -Installer.exception.taskVerStringBang.msg=Sleuth Kit JNI test call returned without error, but version string was ""! -Installer.tskLibErr.msg=Problem with Sleuth Kit JNI. Test call failed!\n\nDetails: {0} -Installer.tskLibErr.err=Fatal Error! +Installer.exception.tskVerStringNull.msg=Sleuth Kit JNI test call returned without error, but version string was null\! +Installer.exception.taskVerStringBang.msg=Sleuth Kit JNI test call returned without error, but version string was ""\! +Installer.tskLibErr.msg=Problem with Sleuth Kit JNI. Test call failed\!\n\nDetails: {0} +Installer.tskLibErr.err=Fatal Error\! InterestingHits.interestingItems.text=INTERESTING ITEMS InterestingHits.displayName.text=Interesting Items InterestingHits.createSheet.name.name=Name diff --git a/Core/src/org/sleuthkit/autopsy/filesearch/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/filesearch/Bundle.properties-MERGED index b6e3f1b3f9..075a0e7afb 100755 --- a/Core/src/org/sleuthkit/autopsy/filesearch/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/filesearch/Bundle.properties-MERGED @@ -19,7 +19,7 @@ KnownStatusSearchPanel.knownCheckBox.text=Known Status: KnownStatusSearchPanel.knownBadOptionCheckBox.text=Notable KnownStatusSearchPanel.knownOptionCheckBox.text=Known (NSRL or other) KnownStatusSearchPanel.unknownOptionCheckBox.text=Unknown -DateSearchFilter.noneSelectedMsg.text=At least one date type must be selected! +DateSearchFilter.noneSelectedMsg.text=At least one date type must be selected\! 
DateSearchPanel.dateCheckBox.text=Date: DateSearchPanel.jLabel4.text=Timezone: DateSearchPanel.createdCheckBox.text=Created @@ -60,7 +60,7 @@ FileSearchPanel.search.results.details=Large number of matches may impact perfor FileSearchPanel.search.exception.noFilterSelected.msg=At least one filter must be selected. FileSearchPanel.search.validationErr.msg=Validation Error: {0} FileSearchPanel.emptyWhereClause.text=Invalid options, nothing to show. -KnownStatusSearchFilter.noneSelectedMsg.text=At least one known status must be selected! +KnownStatusSearchFilter.noneSelectedMsg.text=At least one known status must be selected\! NameSearchFilter.emptyNameMsg.text=Must enter something for name search. SizeSearchPanel.sizeCompareComboBox.equalTo=equal to SizeSearchPanel.sizeCompareComboBox.greaterThan=greater than diff --git a/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties-MERGED index a99e8f1b9e..11fbd0a9d8 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties-MERGED @@ -144,7 +144,7 @@ IngestJob.cancelReason.outOfDiskSpace.text=Out of disk space IngestJob.cancelReason.servicesDown.text=Services Down IngestJob.cancelReason.caseClosed.text=Case closed IngestJobSettingsPanel.globalSettingsButton.text=Global Settings -gest= +gest IngestJobSettingsPanel.globalSettingsButton.actionCommand=Advanced IngestJobSettingsPanel.globalSettingsButton.text=Global Settings IngestJobSettingsPanel.pastJobsButton.text=History diff --git a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/Bundle.properties-MERGED index 57f7e7ff9b..6f7251676d 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/Bundle.properties-MERGED @@ -12,7 
+12,12 @@ ExtractArchiveWithPasswordAction.progress.text=Unpacking contents of archive: {0 ExtractArchiveWithPasswordAction.prompt.text=Enter Password ExtractArchiveWithPasswordAction.prompt.title=Enter Password OpenIDE-Module-Display-Category=Ingest Module -OpenIDE-Module-Long-Description=Embedded File Extraction Ingest Module\n\nThe Embedded File Extraction Ingest Module processes document files (such as doc, docx, ppt, pptx, xls, xlsx) and archive files (such as zip and others archive types supported by the 7zip extractor).\nContents of these files are extracted and the derived files are added back to the current ingest to be processed by the configured ingest modules.\nIf the derived file happens to be an archive file, it will be re-processed by the 7zip extractor - the extractor will process archive files N-levels deep.\n\nThe extracted files are navigable in the directory tree.\n\nThe module is supported on Windows, Linux and Mac operating systems. +OpenIDE-Module-Long-Description=\ + Embedded File Extraction Ingest Module\n\nThe Embedded File Extraction Ingest Module processes document files (such as doc, docx, ppt, pptx, xls, xlsx) and archive files (such as zip and others archive types supported by the 7zip extractor).\n\ + Contents of these files are extracted and the derived files are added back to the current ingest to be processed by the configured ingest modules.\n\ + If the derived file happens to be an archive file, it will be re-processed by the 7zip extractor - the extractor will process archive files N-levels deep.\n\n\ + The extracted files are navigable in the directory tree.\n\n\ + The module is supported on Windows, Linux and Mac operating systems. 
OpenIDE-Module-Name=Embedded File Extraction OpenIDE-Module-Short-Description=Embedded File Extraction Ingest Module EmbeddedFileExtractorIngestModule.SevenZipContentReadStream.seek.exception.invalidOrigin=Invalid seek origin: {0} diff --git a/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/Bundle.properties-MERGED index 5063bd55fa..cfaadf1635 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/Bundle.properties-MERGED @@ -36,27 +36,27 @@ FileExtMismatchSettingsPanel.jLabel1.text=File Types: FileExtMismatchSettingsPanel.newExtButton.text=New Extension FileExtMismatchSettingsPanel.newMimePrompt.message=Add a new MIME file type: FileExtMismatchSettingsPanel.newMimePrompt.title=New MIME -FileExtMismatchSettingsPanel.newMimePrompt.emptyMime.message=MIME type text is empty! +FileExtMismatchSettingsPanel.newMimePrompt.emptyMime.message=MIME type text is empty\! FileExtMismatchSettingsPanel.newMimePrompt.emptyMime.title=Empty type -FileExtMismatchSettingsPanel.newMimePrompt.mimeTypeNotSupported.message=MIME type not supported! +FileExtMismatchSettingsPanel.newMimePrompt.mimeTypeNotSupported.message=MIME type not supported\! FileExtMismatchSettingsPanel.newMimePrompt.mimeTypeNotSupported.title=Type not supported -FileExtMismatchSettingsPanel.newMimePrompt.mimeTypeExists.message=MIME type already exists! +FileExtMismatchSettingsPanel.newMimePrompt.mimeTypeExists.message=MIME type already exists\! FileExtMismatchSettingsPanel.newMimePrompt.mimeTypeExists.title=Type already exists FileExtMismatchSettingsPanel.newMimePrompt.mimeTypeNotDetectable.message=MIME type is not detectable by this module. FileExtMismatchSettingsPanel.newMimePrompt.mimeTypeNotDetectable.title=Type not detectable -FileExtMismatchSettingsPanel.removeTypeButton.noneSelected.message=No MIME type selected! 
+FileExtMismatchSettingsPanel.removeTypeButton.noneSelected.message=No MIME type selected\! FileExtMismatchSettingsPanel.removeTypeButton.noneSelected.title=No type selected FileExtMismatchSettingsPanel.newExtPrompt.message=Add an allowed extension: FileExtMismatchSettingsPanel.newExtPrompt.title=New allowed extension -FileExtMismatchSettingsPanel.newExtPrompt.empty.message=Extension text is empty! +FileExtMismatchSettingsPanel.newExtPrompt.empty.message=Extension text is empty\! FileExtMismatchSettingsPanel.newExtPrompt.empty.title=Extension text empty -FileExtMismatchSettingsPanel.newExtPrompt.noMimeType.message=No MIME type selected! +FileExtMismatchSettingsPanel.newExtPrompt.noMimeType.message=No MIME type selected\! FileExtMismatchSettingsPanel.newExtPrompt.noMimeType.title=No MIME type selected -FileExtMismatchSettingsPanel.newExtPrompt.extExists.message=Extension already exists! +FileExtMismatchSettingsPanel.newExtPrompt.extExists.message=Extension already exists\! FileExtMismatchSettingsPanel.newExtPrompt.extExists.title=Extension already exists -FileExtMismatchSettingsPanel.removeExtButton.noneSelected.message=No extension selected! +FileExtMismatchSettingsPanel.removeExtButton.noneSelected.message=No extension selected\! FileExtMismatchSettingsPanel.removeExtButton.noneSelected.title=No extension selected -FileExtMismatchSettingsPanel.removeExtButton.noMimeTypeSelected.message=No MIME type selected! +FileExtMismatchSettingsPanel.removeExtButton.noMimeTypeSelected.message=No MIME type selected\! 
FileExtMismatchSettingsPanel.removeExtButton.noMimeTypeSelected.title=No MIME type selected FileExtMismatchSettingsPanel.removeTypeButton.toolTipText= FileExtMismatchModuleSettingsPanel.checkAllRadioButton.text=Check all file types diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties-MERGED index 8dbb55e35f..dd5aa258cc 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties-MERGED @@ -61,7 +61,10 @@ ImportCentralRepoDbProgressDialog.errorParsingFile.message=Error parsing hash se ImportCentralRepoDbProgressDialog.linesProcessed.message=\ hashes processed ImportCentralRepoDbProgressDialog.title.text=Central Repository Import Progress OpenIDE-Module-Display-Category=Ingest Module -OpenIDE-Module-Long-Description=Hash Set ingest module. \n\nThe ingest module analyzes files in the disk image and marks them as "known" (based on NSRL hashset lookup for "known" files) and "bad / interesting" (based on one or more hash sets supplied by the user).\n\nThe module also contains additional non-ingest tools that are integrated in the GUI, such as file lookup by hash and hash set configuration. +OpenIDE-Module-Long-Description=\ + Hash Set ingest module. \n\n\ + The ingest module analyzes files in the disk image and marks them as "known" (based on NSRL hashset lookup for "known" files) and "bad / interesting" (based on one or more hash sets supplied by the user).\n\n\ + The module also contains additional non-ingest tools that are integrated in the GUI, such as file lookup by hash and hash set configuration. 
OpenIDE-Module-Name=HashDatabases OptionsCategory_Name_HashDatabase=Hash Sets OptionsCategory_Keywords_HashDatabase=Hash Sets @@ -188,7 +191,10 @@ HashDbSearchThread.name.searching=Searching HashDbSearchThread.noMoreFilesWithMD5Msg=No other files with the same MD5 hash were found. ModalNoButtons.indexingDbsTitle=Indexing hash sets ModalNoButtons.indexingDbTitle=Indexing hash set -ModalNoButtons.exitHashDbIndexingMsg=You are about to exit out of indexing your hash sets. \nThe generated index will be left unusable. If you choose to continue,\nplease delete the corresponding -md5.idx file in the hash folder.\nExit indexing? +ModalNoButtons.exitHashDbIndexingMsg=You are about to exit out of indexing your hash sets. \n\ +The generated index will be left unusable. If you choose to continue,\n\ + please delete the corresponding -md5.idx file in the hash folder.\n\ + Exit indexing? ModalNoButtons.dlgTitle.unfinishedIndexing=Unfinished Indexing ModalNoButtons.indexThis.currentlyIndexing1Db=Currently indexing 1 hash set ModalNoButtons.indexThese.currentlyIndexing1OfNDbs=Currently indexing 1 of {0} diff --git a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/Bundle.properties-MERGED index 06699b446a..6fb258f014 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/Bundle.properties-MERGED @@ -122,8 +122,8 @@ FilesSetRulePanel.nameTextField.text= FilesSetRulePanel.ruleNameLabel.text=Rule Name (Optional): FilesSetRulePanel.messages.emptyNameCondition=You must specify a name pattern for this rule. FilesSetRulePanel.messages.invalidNameRegex=The name regular expression is not valid:\n\n{0} -FilesSetRulePanel.messages.invalidCharInName=The name cannot contain \\, /, :, *, ?, ", <, or > unless it is a regular expression. 
-FilesSetRulePanel.messages.invalidCharInPath=The path cannot contain \\, :, *, ?, ", <, or > unless it is a regular expression. +FilesSetRulePanel.messages.invalidCharInName=The name cannot contain \\, /, :, *, ?, \", <, or > unless it is a regular expression. +FilesSetRulePanel.messages.invalidCharInPath=The path cannot contain \\, :, *, ?, \", <, or > unless it is a regular expression. FilesSetRulePanel.messages.invalidPathRegex=The path regular expression is not valid:\n\n{0} FilesSetDefsPanel.doFileSetsDialog.duplicateRuleSet.text=Rule set with name {0} already exists. FilesSetRulePanel.pathSeparatorInfoLabel.text=Folder must be in parent path. Use '/' to give consecutive names diff --git a/Core/src/org/sleuthkit/autopsy/modules/photoreccarver/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/modules/photoreccarver/Bundle.properties-MERGED index f5dd54dc50..1d07988e4c 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/photoreccarver/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/modules/photoreccarver/Bundle.properties-MERGED @@ -24,7 +24,7 @@ PhotoRecIngestModule.complete.totalParsetime=Total Parsing Time: PhotoRecIngestModule.complete.photoRecResults=PhotoRec Results PhotoRecIngestModule.NotEnoughDiskSpace.detail.msg=PhotoRec error processing {0} with {1} Not enough space on primary disk to save unallocated space. PhotoRecIngestModule.cancelledByUser=PhotoRec cancelled by user. -PhotoRecIngestModule.error.exitValue=PhotoRec carver returned error exit value = {0} when scanning {1} +PhotoRecIngestModule.error.exitValue=PhotoRec carver returned error exit value \= {0} when scanning {1} PhotoRecIngestModule.error.msg=Error processing {0} with PhotoRec carver. 
PhotoRecIngestModule.complete.numberOfErrors=Number of Errors while Carving: PhotoRecCarverIngestJobSettingsPanel.detectionSettingsLabel.text=PhotoRec Settings diff --git a/Core/src/org/sleuthkit/autopsy/report/modules/html/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/report/modules/html/Bundle.properties-MERGED index fce93671b3..3db1b822ea 100755 --- a/Core/src/org/sleuthkit/autopsy/report/modules/html/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/report/modules/html/Bundle.properties-MERGED @@ -5,8 +5,8 @@ ReportHTML.getName.text=HTML Report ReportHTML.getDesc.text=A report about results and tagged items in HTML format. ReportHTML.writeIndex.title=for case {0} ReportHTML.writeIndex.noFrames.msg=Your browser is not compatible with our frame setup. -ReportHTML.writeIndex.noFrames.seeNav=Please see the navigation page for artifact links, -ReportHTML.writeIndex.seeSum=and the summary page for a case summary. +ReportHTML.writeIndex.noFrames.seeNav=Please see the navigation page for artifact links, +ReportHTML.writeIndex.seeSum=and the summary page for a case summary. ReportHTML.writeNav.title=Report Navigation ReportHTML.writeNav.h1=Report Navigation ReportHTML.writeNav.summary=Case Summary @@ -16,7 +16,7 @@ ReportHTML.writeSum.caseNumber=Case Number: ReportHTML.writeSum.caseNumImages=Number of data sources in case: ReportHTML.writeSum.examiner=Examiner: ReportHTML.writeSum.title=Case Summary -ReportHTML.writeSum.warningMsg=Warning, this report was run before ingest services completed! +ReportHTML.writeSum.warningMsg=Warning, this report was run before ingest services completed\! 
# # autopsy/test/scripts/regression.py._html_report_diff() uses reportGenOn.text, caseName, caseNum, # examiner as a regex signature to skip report.html and summary.html diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/configuration/Bundle.properties-MERGED b/Experimental/src/org/sleuthkit/autopsy/experimental/configuration/Bundle.properties-MERGED index 854c57bed1..86fd175181 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/configuration/Bundle.properties-MERGED +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/configuration/Bundle.properties-MERGED @@ -65,19 +65,15 @@ DayOfTheWeekRenderer_Tuesday_Label=Tuesday DayOfTheWeekRenderer_Wednesday_Label=Wednesday GeneralOptionsPanelController.moduleErr.msg=A module caused an error listening to GeneralOptionsPanelController updates. See log to determine which module. Some data could be incomplete. GeneralOptionsPanelController.moduleErr=Module Error -# {0} - errorMessage MultiUserTestTool.criticalError=Critical error running data source processor on test data source: {0} MultiUserTestTool.errorStartingIngestJob=Ingest manager error while starting ingest job -# {0} - cancellationReason MultiUserTestTool.ingestCancelled=Ingest cancelled due to {0} MultiUserTestTool.ingestSettingsError=Failed to analyze data source due to ingest settings errors MultiUserTestTool.noContent=Test data source failed to produce content -# {0} - serviceName MultiUserTestTool.serviceDown=Multi User service is down: {0} MultiUserTestTool.startupError=Failed to analyze data source due to ingest job startup error MultiUserTestTool.unableAddFileAsDataSource=Unable to add test file as data source to case MultiUserTestTool.unableCreatFile=Unable to create a file in case output directory -# {0} - serviceName MultiUserTestTool.unableToCheckService=Unable to check Multi User service state: {0} MultiUserTestTool.unableToCreateCase=Unable to create case MultiUserTestTool.unableToInitializeDatabase=Case database was not 
successfully initialized From c1a7d8e44252aa1e0934b368aeff2b15f0d25f5f Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Fri, 19 Nov 2021 14:50:55 -0500 Subject: [PATCH 057/142] Update FileTypeExtensionsEvent.java --- .../mainui/datamodel/events/FileTypeExtensionsEvent.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeExtensionsEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeExtensionsEvent.java index 630bdc67d0..dadac922b1 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeExtensionsEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeExtensionsEvent.java @@ -21,7 +21,8 @@ package org.sleuthkit.autopsy.mainui.datamodel.events; import java.util.Objects; /** - * An event where file type extensions could be affected. + * An event to signal that files have been added or removed + * with the given extension on the given data source. */ public class FileTypeExtensionsEvent implements DAOEvent { From d27bb5f65723e4b50f0a05e8f47b5285cc6a0cda Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Fri, 19 Nov 2021 14:51:44 -0500 Subject: [PATCH 058/142] Update TagsEvent.java --- .../sleuthkit/autopsy/mainui/datamodel/events/TagsEvent.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TagsEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TagsEvent.java index 12cfc26d64..948f96e20d 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TagsEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TagsEvent.java @@ -22,7 +22,8 @@ import java.util.Objects; import org.sleuthkit.autopsy.mainui.datamodel.TagsSearchParams.TagType; /** - * An event affecting tags + * An event to signal that tags have been added or removed on the + * given data source with the given types. 
*/ public class TagsEvent implements DAOEvent { From 79877ce39e6cb5240af444ed5f65e378ef487a09 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Fri, 19 Nov 2021 14:52:12 -0500 Subject: [PATCH 059/142] private isInvalidating --- .../autopsy/mainui/datamodel/AnalysisResultDAO.java | 4 ++-- .../sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java | 2 +- .../sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java | 2 +- .../sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java | 4 ++-- .../sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java | 2 +- .../src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java | 2 +- .../org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java | 6 +++--- 7 files changed, 11 insertions(+), 11 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java index d5b61f3165..3f181d9e6a 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java @@ -278,7 +278,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { return analysisResultCache.get(searchParams, () -> fetchAnalysisResultsForTable(searchParams)); } - public boolean isAnalysisResultsInvalidating(AnalysisResultSearchParam key, DAOEvent eventData) { + private boolean isAnalysisResultsInvalidating(AnalysisResultSearchParam key, DAOEvent eventData) { if (!(eventData instanceof AnalysisResultEvent)) { return false; } @@ -288,7 +288,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { && (key.getDataSourceId() == null || key.getDataSourceId() == analysisResultEvt.getDataSourceId()); } - public boolean isAnalysisResultsSetInvalidating(AnalysisResultSetSearchParam key, DAOEvent event) { + private boolean isAnalysisResultsSetInvalidating(AnalysisResultSetSearchParam key, DAOEvent event) { if (!(event instanceof AnalysisResultSetEvent)) { return false; } 
diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java index f577aae002..ec5a13bc59 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java @@ -212,7 +212,7 @@ public class CommAccountsDAO extends AbstractDAO { * * @return True if event invalidates parameters. */ - public boolean isCommAcctInvalidating(CommAccountsSearchParams parameters, DAOEvent evt) { + private boolean isCommAcctInvalidating(CommAccountsSearchParams parameters, DAOEvent evt) { if (evt instanceof CommAccountsEvent) { CommAccountsEvent commEvt = (CommAccountsEvent) evt; return (parameters.getType().getTypeName().equals(commEvt.getAccountType())) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index 45b47025d4..7d2955aefa 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -119,7 +119,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { return dataArtifactCache.get(searchParams, () -> fetchDataArtifactsForTable(searchParams)); } - public boolean isDataArtifactInvalidating(DataArtifactSearchParam key, DAOEvent eventData) { + private boolean isDataArtifactInvalidating(DataArtifactSearchParam key, DAOEvent eventData) { if (!(eventData instanceof DataArtifactEvent)) { return false; } else { diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java index 7550893779..8dc1952245 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java @@ -119,7 +119,7 @@ public class FileSystemDAO extends AbstractDAO { 
private final Cache, BaseSearchResultsDTO> searchParamsCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).expireAfterAccess(CACHE_DURATION, CACHE_DURATION_UNITS).build(); - public boolean isSystemContentInvalidating(FileSystemContentSearchParam key, DAOEvent daoEvent) { + private boolean isSystemContentInvalidating(FileSystemContentSearchParam key, DAOEvent daoEvent) { if (!(daoEvent instanceof FileSystemContentEvent)) { return false; } @@ -129,7 +129,7 @@ public class FileSystemDAO extends AbstractDAO { return contentEvt.getContentObjectId() == null || key.getContentObjectId().equals(contentEvt.getContentObjectId()); } - public boolean isSystemHostInvalidating(FileSystemHostSearchParam key, DAOEvent daoEvent) { + private boolean isSystemHostInvalidating(FileSystemHostSearchParam key, DAOEvent daoEvent) { if (!(daoEvent instanceof FileSystemHostEvent)) { return false; } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java index 9b9c7961f2..ccb3230307 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java @@ -116,7 +116,7 @@ public class OsAccountsDAO extends AbstractDAO { return searchParamsCache.get(searchParams, () -> fetchAccountsDTOs(searchParams)); } - public boolean isOSAccountInvalidatingEvt(OsAccountsSearchParams searchParams, DAOEvent evt) { + private boolean isOSAccountInvalidatingEvt(OsAccountsSearchParams searchParams, DAOEvent evt) { return evt instanceof OsAccountEvent; } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java index f4b6d0a592..4c9956c83a 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java @@ -298,7 +298,7 @@ public class TagsDAO extends AbstractDAO { * * @return True if 
the event could affect the results of the search params. */ - public boolean isTagsInvalidatingEvent(TagsSearchParams tagParams, DAOEvent daoEvt) { + private boolean isTagsInvalidatingEvent(TagsSearchParams tagParams, DAOEvent daoEvt) { if (!(daoEvt instanceof TagsEvent)) { return false; } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java index efe6fe6c7d..42003ef430 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java @@ -163,7 +163,7 @@ public class ViewsDAO extends AbstractDAO { return searchParamsCache.get(searchParams, () -> fetchSizeSearchResultsDTOs(key.getSizeFilter(), key.getDataSourceId(), startItem, maxCount)); } - public boolean isFilesByExtInvalidating(FileTypeExtensionsSearchParams key, DAOEvent eventData) { + private boolean isFilesByExtInvalidating(FileTypeExtensionsSearchParams key, DAOEvent eventData) { if (!(eventData instanceof FileTypeExtensionsEvent)) { return false; } @@ -174,7 +174,7 @@ public class ViewsDAO extends AbstractDAO { && (key.getDataSourceId() == null || key.getDataSourceId() == extEvt.getDataSourceId()); } - public boolean isFilesByMimeInvalidating(FileTypeMimeSearchParams key, DAOEvent eventData) { + private boolean isFilesByMimeInvalidating(FileTypeMimeSearchParams key, DAOEvent eventData) { if (!(eventData instanceof FileTypeMimeEvent)) { return false; } @@ -184,7 +184,7 @@ public class ViewsDAO extends AbstractDAO { && (key.getDataSourceId() == null || key.getDataSourceId() == mimeEvt.getDataSourceId()); } - public boolean isFilesBySizeInvalidating(FileTypeSizeSearchParams key, DAOEvent eventData) { + private boolean isFilesBySizeInvalidating(FileTypeSizeSearchParams key, DAOEvent eventData) { if (!(eventData instanceof FileTypeSizeEvent)) { return false; } From ae4a059cac18c8383323a9f649f3281b759d2347 Mon Sep 17 00:00:00 2001 From: Brian Carrier 
Date: Fri, 19 Nov 2021 14:52:35 -0500 Subject: [PATCH 060/142] Update FileTypeSizeEvent.java --- .../autopsy/mainui/datamodel/events/FileTypeSizeEvent.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeSizeEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeSizeEvent.java index 71cfc3c1d5..eb0f37f8a4 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeSizeEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileTypeSizeEvent.java @@ -22,7 +22,8 @@ import java.util.Objects; import org.sleuthkit.autopsy.mainui.datamodel.FileSizeFilter; /** - * Key for accessing data about file sizeFilter from the DAO. + * An event to signal that files have been added or removed + * within the given size range on the given data source. */ public class FileTypeSizeEvent implements DAOEvent { From 44ca84dcaf70d599d858db07fa05e1dbb18595a7 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Fri, 19 Nov 2021 15:17:18 -0500 Subject: [PATCH 061/142] updates --- .../corecomponents/DataResultPanel.java | 2 +- .../mainui/datamodel/AnalysisResultDAO.java | 30 ++++++------------ .../mainui/datamodel/CommAccountsDAO.java | 10 ++---- .../mainui/datamodel/DataArtifactDAO.java | 10 ++---- .../mainui/datamodel/FileSystemDAO.java | 29 +++++------------ .../mainui/datamodel/OsAccountsDAO.java | 10 ++---- .../autopsy/mainui/datamodel/TagsDAO.java | 10 ++---- .../autopsy/mainui/datamodel/ViewsDAO.java | 30 ++++++------------ .../events/FileSystemContentEvent.java | 3 +- .../datamodel/events/FileSystemHostEvent.java | 3 +- .../events/FileSystemPersonEvent.java | 3 +- .../autopsy/mainui/nodes/DAOFetcher.java | 3 +- .../autopsy/mainui/nodes/SearchManager.java | 31 +++---------------- 13 files changed, 49 insertions(+), 125 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java 
b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java index 4fe28ea506..a47dcbdf28 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java @@ -1397,7 +1397,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C */ private void refreshSearchResultChildren() { try { - refreshSearchResultChildren(this.searchResultManager.getRefreshedData()); + refreshSearchResultChildren(this.searchResultManager.getResults()); } catch (ExecutionException | IllegalArgumentException ex) { logger.log(Level.WARNING, "There was an error refreshing data: ", ex); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java index 3f181d9e6a..52269c762f 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java @@ -260,7 +260,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { return new AnalysisResultRowDTO((AnalysisResult) artifact, srcContent, isTimelineSupported, cellValues, id); } - public AnalysisResultTableSearchResultsDTO getAnalysisResultsForTable(AnalysisResultSearchParam artifactKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { + public AnalysisResultTableSearchResultsDTO getAnalysisResultsForTable(AnalysisResultSearchParam artifactKey, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException { BlackboardArtifact.Type artType = artifactKey.getArtifactType(); if (artType == null || artType.getCategory() != BlackboardArtifact.Category.ANALYSIS_RESULT @@ -271,10 +271,6 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } SearchParams searchParams = new SearchParams<>(artifactKey, startItem, maxCount); - if (hardRefresh) { - 
analysisResultCache.invalidate(searchParams); - } - return analysisResultCache.get(searchParams, () -> fetchAnalysisResultsForTable(searchParams)); } @@ -299,7 +295,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } // GVDTODO handle keyword hits - public AnalysisResultTableSearchResultsDTO getAnalysisResultSetHits(AnalysisResultSetSearchParam artifactKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { + public AnalysisResultTableSearchResultsDTO getAnalysisResultSetHits(AnalysisResultSetSearchParam artifactKey, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException { if (artifactKey.getDataSourceId() != null && artifactKey.getDataSourceId() < 0) { throw new IllegalArgumentException(MessageFormat.format("Illegal data. " + "Data source id must be null or > 0. " @@ -307,16 +303,12 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } SearchParams searchParams = new SearchParams<>(artifactKey, startItem, maxCount); - if (hardRefresh) { - setHitCache.invalidate(searchParams); - } - return setHitCache.get(searchParams, () -> fetchSetNameHitsForTable(searchParams)); } // TODO - JIRA-8117 // This needs to use more than just the set name - public AnalysisResultTableSearchResultsDTO getKeywordHitsForTable(KeywordHitSearchParam artifactKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { + public AnalysisResultTableSearchResultsDTO getKeywordHitsForTable(KeywordHitSearchParam artifactKey, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException { if (artifactKey.getDataSourceId() != null && artifactKey.getDataSourceId() < 0) { throw new IllegalArgumentException(MessageFormat.format("Illegal data. " + "Data source id must be null or > 0. 
" @@ -324,10 +316,6 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } SearchParams searchParams = new SearchParams<>(artifactKey, startItem, maxCount); - if (hardRefresh) { - keywordHitCache.invalidate(searchParams); - } - return keywordHitCache.get(searchParams, () -> fetchSetNameHitsForTable(searchParams)); } @@ -840,8 +828,8 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } @Override - public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return getDAO().getAnalysisResultsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { + return getDAO().getAnalysisResultsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize); } @Override @@ -869,8 +857,8 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } @Override - public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return getDAO().getAnalysisResultSetHits(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { + return getDAO().getAnalysisResultSetHits(this.getParameters(), pageIdx * pageSize, (long) pageSize); } @Override @@ -898,8 +886,8 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } @Override - public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return getDAO().getKeywordHitsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { + return getDAO().getKeywordHitsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize); } @Override diff --git 
a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java index ec5a13bc59..db9b9bb2ee 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java @@ -75,7 +75,7 @@ public class CommAccountsDAO extends AbstractDAO { return instance; } - public SearchResultsDTO getCommAcounts(CommAccountsSearchParams key, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { + public SearchResultsDTO getCommAcounts(CommAccountsSearchParams key, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException { if (key.getType() == null) { throw new IllegalArgumentException("Must have non-null type"); } else if (key.getDataSourceId() != null && key.getDataSourceId() <= 0) { @@ -83,10 +83,6 @@ public class CommAccountsDAO extends AbstractDAO { } SearchParams searchParams = new SearchParams<>(key, startItem, maxCount); - if (hardRefresh) { - this.searchParamsCache.invalidate(searchParams); - } - return searchParamsCache.get(searchParams, () -> fetchCommAccountsDTOs(searchParams)); } @@ -241,8 +237,8 @@ public class CommAccountsDAO extends AbstractDAO { } @Override - public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return getDAO().getCommAcounts(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { + return getDAO().getCommAcounts(this.getParameters(), pageIdx * pageSize, (long) pageSize); } @Override diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index 7d2955aefa..b46c710113 100644 --- 
a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -101,7 +101,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { return new DataArtifactRowDTO((DataArtifact) artifact, srcContent, linkedFile, isTimelineSupported, cellValues, id); } - public DataArtifactTableSearchResultsDTO getDataArtifactsForTable(DataArtifactSearchParam artifactKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { + public DataArtifactTableSearchResultsDTO getDataArtifactsForTable(DataArtifactSearchParam artifactKey, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException { BlackboardArtifact.Type artType = artifactKey.getArtifactType(); if (artType == null || artType.getCategory() != BlackboardArtifact.Category.DATA_ARTIFACT @@ -112,10 +112,6 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { } SearchParams searchParams = new SearchParams<>(artifactKey, startItem, maxCount); - if (hardRefresh) { - this.dataArtifactCache.invalidate(searchParams); - } - return dataArtifactCache.get(searchParams, () -> fetchDataArtifactsForTable(searchParams)); } @@ -242,8 +238,8 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { } @Override - public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return getDAO().getDataArtifactsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { + return getDAO().getDataArtifactsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize); } @Override diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java index 8dc1952245..91c22ec149 100644 --- 
a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java @@ -296,33 +296,18 @@ public class FileSystemDAO extends AbstractDAO { return pagedArtsStream.collect(Collectors.toList()); } - public BaseSearchResultsDTO getContentForTable(FileSystemContentSearchParam objectKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { - + public BaseSearchResultsDTO getContentForTable(FileSystemContentSearchParam objectKey, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException { SearchParams searchParams = new SearchParams<>(objectKey, startItem, maxCount); - if (hardRefresh) { - searchParamsCache.invalidate(searchParams); - } - return searchParamsCache.get(searchParams, () -> fetchContentForTableFromContent(searchParams)); } - public BaseSearchResultsDTO getContentForTable(FileSystemHostSearchParam objectKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { - + public BaseSearchResultsDTO getContentForTable(FileSystemHostSearchParam objectKey, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException { SearchParams searchParams = new SearchParams<>(objectKey, startItem, maxCount); - if (hardRefresh) { - searchParamsCache.invalidate(searchParams); - } - return searchParamsCache.get(searchParams, () -> fetchContentForTableFromHost(searchParams)); } - public BaseSearchResultsDTO getHostsForTable(FileSystemPersonSearchParam objectKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { - + public BaseSearchResultsDTO getHostsForTable(FileSystemPersonSearchParam objectKey, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException { SearchParams searchParams = new SearchParams<>(objectKey, startItem, maxCount); - if (hardRefresh) { - 
searchParamsCache.invalidate(searchParams); - } - return searchParamsCache.get(searchParams, () -> fetchHostsForTable(searchParams)); } @@ -477,8 +462,8 @@ public class FileSystemDAO extends AbstractDAO { } @Override - public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return getDAO().getContentForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { + return getDAO().getContentForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize); } @Override @@ -503,8 +488,8 @@ public class FileSystemDAO extends AbstractDAO { } @Override - public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return getDAO().getContentForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { + return getDAO().getContentForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize); } @Override diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java index ccb3230307..cf3dc3e811 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java @@ -101,7 +101,7 @@ public class OsAccountsDAO extends AbstractDAO { return new ColumnKey(name, name, Bundle.OsAccountsDAO_fileColumns_noDescription()); } - public SearchResultsDTO getAccounts(OsAccountsSearchParams key, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { + public SearchResultsDTO getAccounts(OsAccountsSearchParams key, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException { if (key == null) { 
throw new IllegalArgumentException("Search parameters are null"); } else if (key.getDataSourceId() != null && key.getDataSourceId() <= 0) { @@ -109,10 +109,6 @@ public class OsAccountsDAO extends AbstractDAO { } SearchParams searchParams = new SearchParams<>(key, startItem, maxCount); - if (hardRefresh) { - this.searchParamsCache.invalidate(searchParams); - } - return searchParamsCache.get(searchParams, () -> fetchAccountsDTOs(searchParams)); } @@ -220,8 +216,8 @@ public class OsAccountsDAO extends AbstractDAO { } @Override - public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return getDAO().getAccounts(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { + return getDAO().getAccounts(this.getParameters(), pageIdx * pageSize, (long) pageSize); } @Override diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java index 4c9956c83a..837c124c39 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java @@ -128,7 +128,7 @@ public class TagsDAO extends AbstractDAO { return new ColumnKey(name, name, Bundle.TagsDAO_fileColumns_noDescription()); } - public SearchResultsDTO getTags(TagsSearchParams key, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { + public SearchResultsDTO getTags(TagsSearchParams key, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException { if (key.getTagName() == null) { throw new IllegalArgumentException("Must have non-null tag name"); } else if (key.getDataSourceId() != null && key.getDataSourceId() <= 0) { @@ -138,10 +138,6 @@ public class TagsDAO extends AbstractDAO { } SearchParams searchParams = new SearchParams<>(key, 
startItem, maxCount); - if (hardRefresh) { - this.searchParamsCache.invalidate(searchParams); - } - return searchParamsCache.get(searchParams, () -> fetchTagsDTOs(searchParams)); } @@ -422,8 +418,8 @@ public class TagsDAO extends AbstractDAO { } @Override - public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return getDAO().getTags(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { + return getDAO().getTags(this.getParameters(), pageIdx * pageSize, (long) pageSize); } @Override diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java index 42003ef430..f9fe65ad16 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java @@ -118,7 +118,7 @@ public class ViewsDAO extends AbstractDAO { return Case.getCurrentCaseThrows().getSleuthkitCase(); } - public SearchResultsDTO getFilesByExtension(FileTypeExtensionsSearchParams key, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { + public SearchResultsDTO getFilesByExtension(FileTypeExtensionsSearchParams key, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException { if (key.getFilter() == null) { throw new IllegalArgumentException("Must have non-null filter"); } else if (key.getDataSourceId() != null && key.getDataSourceId() <= 0) { @@ -126,14 +126,10 @@ public class ViewsDAO extends AbstractDAO { } SearchParams searchParams = new SearchParams<>(key, startItem, maxCount); - if (hardRefresh) { - this.searchParamsCache.invalidate(searchParams); - } - return searchParamsCache.get(searchParams, () -> fetchExtensionSearchResultsDTOs(key.getFilter(), key.getDataSourceId(), startItem, maxCount)); } - 
public SearchResultsDTO getFilesByMime(FileTypeMimeSearchParams key, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { + public SearchResultsDTO getFilesByMime(FileTypeMimeSearchParams key, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException { if (key.getMimeType() == null) { throw new IllegalArgumentException("Must have non-null filter"); } else if (key.getDataSourceId() != null && key.getDataSourceId() <= 0) { @@ -141,14 +137,10 @@ public class ViewsDAO extends AbstractDAO { } SearchParams searchParams = new SearchParams<>(key, startItem, maxCount); - if (hardRefresh) { - this.searchParamsCache.invalidate(searchParams); - } - return searchParamsCache.get(searchParams, () -> fetchMimeSearchResultsDTOs(key.getMimeType(), key.getDataSourceId(), startItem, maxCount)); } - public SearchResultsDTO getFilesBySize(FileTypeSizeSearchParams key, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException { + public SearchResultsDTO getFilesBySize(FileTypeSizeSearchParams key, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException { if (key.getSizeFilter() == null) { throw new IllegalArgumentException("Must have non-null filter"); } else if (key.getDataSourceId() != null && key.getDataSourceId() <= 0) { @@ -156,10 +148,6 @@ public class ViewsDAO extends AbstractDAO { } SearchParams searchParams = new SearchParams<>(key, startItem, maxCount); - if (hardRefresh) { - this.searchParamsCache.invalidate(searchParams); - } - return searchParamsCache.get(searchParams, () -> fetchSizeSearchResultsDTOs(key.getSizeFilter(), key.getDataSourceId(), startItem, maxCount)); } @@ -835,8 +823,8 @@ public class ViewsDAO extends AbstractDAO { } @Override - public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return getDAO().getFilesByExtension(this.getParameters(), pageIdx 
* pageSize, (long) pageSize, hardRefresh); + public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { + return getDAO().getFilesByExtension(this.getParameters(), pageIdx * pageSize, (long) pageSize); } @Override @@ -864,8 +852,8 @@ public class ViewsDAO extends AbstractDAO { } @Override - public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return getDAO().getFilesByMime(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { + return getDAO().getFilesByMime(this.getParameters(), pageIdx * pageSize, (long) pageSize); } @Override @@ -893,8 +881,8 @@ public class ViewsDAO extends AbstractDAO { } @Override - public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return getDAO().getFilesBySize(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh); + public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { + return getDAO().getFilesBySize(this.getParameters(), pageIdx * pageSize, (long) pageSize); } @Override diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemContentEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemContentEvent.java index 1474c95f04..a72c93cea2 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemContentEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemContentEvent.java @@ -21,7 +21,8 @@ package org.sleuthkit.autopsy.mainui.datamodel.events; import java.util.Objects; /** - * An event that affects the given parent content. + * An event signaling that children files were added or removed from the given + * parent ID. 
*/ public class FileSystemContentEvent implements DAOEvent { diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemHostEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemHostEvent.java index 1788463cc1..f777435474 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemHostEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemHostEvent.java @@ -21,7 +21,8 @@ package org.sleuthkit.autopsy.mainui.datamodel.events; import java.util.Objects; /** - * An event that affects the given host. + * An event signaling that a data source has been added or removed from the + * given Host. */ public class FileSystemHostEvent implements DAOEvent { diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemPersonEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemPersonEvent.java index bcb9db888f..110429a6d9 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemPersonEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/FileSystemPersonEvent.java @@ -21,7 +21,8 @@ package org.sleuthkit.autopsy.mainui.datamodel.events; import java.util.Objects; /** - * An event that affects the given person. + * An event signaling that a host has been added or removed from the given + * Person. */ public class FileSystemPersonEvent implements DAOEvent { diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DAOFetcher.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DAOFetcher.java index 9dd06f4428..5bff79f50b 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DAOFetcher.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DAOFetcher.java @@ -55,13 +55,12 @@ public abstract class DAOFetcher

{ * * @param pageSize The number of items per page. * @param pageIdx The page index. - * @param hardRefresh Whether or not to perform a hard refresh. * * @return The retrieved data. * * @throws ExecutionException */ - public abstract SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException; + public abstract SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException; /** * Returns true if the ingest module event will require a refresh in the diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/SearchManager.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/SearchManager.java index 951c7979c8..c331eac5c7 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/SearchManager.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/SearchManager.java @@ -133,7 +133,7 @@ public class SearchManager { */ public synchronized SearchResultsDTO updatePageIdx(int pageIdx) throws IllegalArgumentException, ExecutionException { setPageIdx(pageIdx); - return fetchResults(false); + return getResults(); } /** @@ -195,17 +195,6 @@ public class SearchManager { return dataFetcher.isRefreshRequired(evt); } - /** - * Forces a refresh of data based on current search parameters. - * - * @return The refreshed data. - * - * @throws ExecutionException - */ - public synchronized SearchResultsDTO getRefreshedData() throws ExecutionException { - return fetchResults(true); - } - /** * Queries the dao cache for results storing the result in the current * search results. @@ -216,25 +205,13 @@ public class SearchManager { * @throws ExecutionException */ public synchronized SearchResultsDTO getResults() throws IllegalArgumentException, ExecutionException { - return fetchResults(false); + return fetchResults(this.daoFetcher); } - /** - * Fetches results using current page fetcher or returns null if no current - * page fetcher. Also stores current results in local variable. 
- * - * @return The current search results or null if no current page fetcher. - * - * @throws ExecutionException - */ - private synchronized SearchResultsDTO fetchResults(boolean hardRefresh) throws ExecutionException { - return fetchResults(this.daoFetcher, hardRefresh); - } - - private synchronized SearchResultsDTO fetchResults(DAOFetcher dataFetcher, boolean hardRefresh) throws ExecutionException { + private synchronized SearchResultsDTO fetchResults(DAOFetcher dataFetcher) throws ExecutionException { SearchResultsDTO newResults = null; if (dataFetcher != null) { - newResults = dataFetcher.getSearchResults(this.pageSize, this.pageIdx, hardRefresh); + newResults = dataFetcher.getSearchResults(this.pageSize, this.pageIdx); } this.currentSearchResults = newResults; From be4638481cad93e3ca0363622966eac9f8c942bc Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Fri, 19 Nov 2021 15:18:08 -0500 Subject: [PATCH 062/142] 7895 CR data artifact ingest module --- .../ingestmodule/CentralRepoIngestModule.java | 48 +++++++++---------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java index 1d30b2f9bf..f7ee3d0aa2 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java @@ -57,7 +57,19 @@ final class CentralRepoIngestModule implements FileIngestModule { private IngestJobContext context; private CentralRepository centralRepo; private CorrelationAttributeInstance.Type filesType; - + + /** + * Constructs a file ingest module that adds correlation attributes for + * files to the central repository, and makes previously notable analysis + * results for files marked as notable in other cases. + * + * @param settings The ingest job settings. 
+ */ + CentralRepoIngestModule(IngestSettings settings) { + flagNotableItems = settings.isFlagTaggedNotableItems(); + saveCorrAttrInstances = settings.shouldCreateCorrelationProperties(); + } + @Override public ProcessResult process(AbstractFile abstractFile) { if (!flagNotableItems && !saveCorrAttrInstances) { @@ -117,19 +129,18 @@ final class CentralRepoIngestModule implements FileIngestModule { return ProcessResult.OK; } - - /** - * Constructs a file ingest module that adds correlation attributes for - * files to the central repository, and makes previously notable analysis - * results for files marked as notable in other cases. - * - * @param settings The ingest job settings. - */ - CentralRepoIngestModule(IngestSettings settings) { - flagNotableItems = settings.isFlagTaggedNotableItems(); - saveCorrAttrInstances = settings.shouldCreateCorrelationProperties(); - } + @Override + public void shutDown() { + if (refCounter.decrementAndGet(context.getJobId()) == 0) { + try { + centralRepo.commitAttributeInstancesBulk(); + } catch (CentralRepoException ex) { + logger.log(Level.SEVERE, String.format("Error committing bulk insert of correlation attributes (job ID=%d)", context.getJobId()), ex); // NON-NLS + } + } + } + @Messages({ "CentralRepoIngestModule_missingFileCorrAttrTypeErrMsg=Correlation attribute type for files not found in the central repository", "CentralRepoIngestModule_cannotGetCrCaseErrMsg=Case not present in the central repository", @@ -188,15 +199,4 @@ final class CentralRepoIngestModule implements FileIngestModule { } } - @Override - public void shutDown() { - if (refCounter.decrementAndGet(context.getJobId()) == 0) { - try { - centralRepo.commitAttributeInstancesBulk(); - } catch (CentralRepoException ex) { - logger.log(Level.SEVERE, String.format("Error committing bulk insert of correlation attributes (job ID=%d)", context.getJobId()), ex); // NON-NLS - } - } - } - } From 14d268622bb4710f24480365cf74d5a5d777ccec Mon Sep 17 00:00:00 2001 From: 
Richard Cordovano Date: Fri, 19 Nov 2021 15:29:16 -0500 Subject: [PATCH 063/142] 7895 CR data artifact ingest module --- .../centralrepository/ingestmodule/CentralRepoIngestModule.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java index f7ee3d0aa2..32aaedfac3 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java @@ -54,9 +54,9 @@ final class CentralRepoIngestModule implements FileIngestModule { private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter(); private final boolean flagNotableItems; private final boolean saveCorrAttrInstances; + private CorrelationAttributeInstance.Type filesType; private IngestJobContext context; private CentralRepository centralRepo; - private CorrelationAttributeInstance.Type filesType; /** * Constructs a file ingest module that adds correlation attributes for From 1749898d0425971c9fae04cd0bd7ade83f20dbdc Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Fri, 19 Nov 2021 15:33:16 -0500 Subject: [PATCH 064/142] working on tree updates --- .../mainui/nodes/TreeChildFactory.java | 32 +++++++------------ 1 file changed, 11 insertions(+), 21 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java index 395ef12f2e..86d74d17d1 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java @@ -34,13 +34,14 @@ import org.openide.nodes.ChildFactory; import org.openide.nodes.Node; import org.openide.util.WeakListeners; import org.sleuthkit.autopsy.casemodule.Case; 
-import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.guiutils.RefreshThrottler; import org.sleuthkit.autopsy.guiutils.RefreshThrottler.Refresher; import org.sleuthkit.autopsy.ingest.IngestManager; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOAggregateEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; /** * Factory for populating tree with results. @@ -55,26 +56,13 @@ public abstract class TreeChildFactory extends ChildFactory.Detachable { - String eventType = evt.getPropertyName(); - if (eventType.equals(Case.Events.CURRENT_CASE.toString())) { - // case was closed. Remove listeners so that we don't get called with a stale case handle - if (evt.getNewValue() == null) { - removeNotify(); - } - } else if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString()) - || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) { - /** - * This is a stop gap measure until a different way of handling the - * closing of cases is worked out. Currently, remote events may be - * received for a case that is already closed. - */ - try { - Case.getCurrentCaseThrows(); - refresh(false); - } catch (NoCurrentCaseException notUsed) { - /** - * Case is closed, do nothing. 
- */ + if (evt.getNewValue() instanceof DAOAggregateEvent) { + DAOAggregateEvent aggEvt = (DAOAggregateEvent) evt.getNewValue(); + for (DAOEvent daoEvt : aggEvt.getEvents()) { + if (isChildInvalidating(daoEvt)) { + updateData(); + break; + } } } }; @@ -217,4 +205,6 @@ public abstract class TreeChildFactory extends ChildFactory.Detachable getChildResults() throws IllegalArgumentException, ExecutionException; + + protected abstract boolean isChildInvalidating(DAOEvent daoEvt); } From 7b9973272da1a2fbca20701aa9724233990fe36b Mon Sep 17 00:00:00 2001 From: apriestman Date: Mon, 22 Nov 2021 09:16:21 -0500 Subject: [PATCH 065/142] Continuing work on actions --- .../mainui/datamodel/FileSystemColumnUtils.java | 2 +- .../autopsy/mainui/datamodel/FileSystemDAO.java | 10 +++++++++- .../autopsy/mainui/nodes/Bundle.properties-MERGED | 1 - .../autopsy/mainui/nodes/FileSystemFactory.java | 14 +++----------- .../autopsy/mainui/nodes/SpecialDirectoryNode.java | 5 ----- .../mainui/nodes/actions/ActionContext.java | 4 ---- .../mainui/nodes/actions/ActionsFactory.java | 9 ++------- 7 files changed, 15 insertions(+), 30 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemColumnUtils.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemColumnUtils.java index ce8a5fdf3e..6efd60cf2a 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemColumnUtils.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemColumnUtils.java @@ -485,7 +485,7 @@ public class FileSystemColumnUtils { * * @return The display name. 
*/ - private static String getVolumeDisplayName(Volume vol) { + public static String getVolumeDisplayName(Volume vol) { // set name, display name, and icon String volName = "vol" + Long.toString(vol.getAddr()); long end = vol.getStart() + (vol.getLength() - 1); diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java index e5236527c1..214633ae67 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java @@ -368,7 +368,7 @@ public class FileSystemDAO { child.getClass().getSimpleName(), new FileSystemContentSearchParam(child.getId()), child, - child.getName(), + getNameForContent(child), countForNode )); // TODO sort @@ -395,6 +395,14 @@ public class FileSystemDAO { throw new ExecutionException("An error occurred while fetching data artifact counts.", ex); } } + + private String getNameForContent(Content content) { + // Currently the only special case is for volumes + if (content instanceof Volume) { + return FileSystemColumnUtils.getVolumeDisplayName((Volume)content); + } + return content.getName(); + } /** * Handles fetching and paging of data for file types by mime type. 
diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/mainui/nodes/Bundle.properties-MERGED index 76636c3af4..3568b60122 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/Bundle.properties-MERGED @@ -1,6 +1,5 @@ AnalysisResultTypeFactory_adHocName=Adhoc Results FileSystemFactory.FileSystemTreeNode.ExtractUnallocAction.text=Extract Unallocated Space to Single Files -FileSystemFactory.FileSystemTreeNode.OpenFileSearchByAttr.text=Open File Search by Attributes FileSystemFactory.UnsupportedTreeNode.displayName=Unsupported Content ImageNode_ExtractUnallocAction_text=Extract Unallocated Space to Single Files SearchResultRootNode_createSheet_childCount_displayName=Child Count diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileSystemFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileSystemFactory.java index 38cf8ef412..bacff3f8ae 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileSystemFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileSystemFactory.java @@ -33,7 +33,6 @@ import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent; import org.sleuthkit.autopsy.datamodel.FileTypeExtensions; import org.sleuthkit.autopsy.datamodel.utils.IconsUtil; import org.sleuthkit.autopsy.directorytree.ExtractUnallocAction; -import org.sleuthkit.autopsy.directorytree.FileSearchAction; import org.sleuthkit.autopsy.directorytree.FileSystemDetailsAction; import org.sleuthkit.autopsy.ingest.IngestManager; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; @@ -216,8 +215,7 @@ public class FileSystemFactory extends TreeChildFactory implements ActionContext { @@ -274,7 +272,6 @@ public class FileSystemFactory extends TreeChildFactory itemData) { - super(itemData.getDisplayName(), + super(FileSystemColumnUtils.getVolumeDisplayName(volume), NodeIconUtil.VOLUME.getPath(), itemData, 
createChildrenForContent(itemData.getTypeData().getContentObjectId()), @@ -373,7 +370,7 @@ public class FileSystemFactory extends TreeChildFactory getContentForRunIngestionModuleAction() { return Optional.of(dir); } - - @Override - public Optional getContentForFileSearchAction() { - return Optional.of(dir); - } } static class LocalDirectoryTreeNode extends SpecialDirectoryTreeNode { diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/SpecialDirectoryNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/SpecialDirectoryNode.java index 4d01153ae8..0fd976950d 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/SpecialDirectoryNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/SpecialDirectoryNode.java @@ -67,11 +67,6 @@ abstract class SpecialDirectoryNode extends BaseNode getContentForFileSearchAction() { - return Optional.of(getRowDTO().getContent()); - } - @Override public Optional getDataSourceForActions() { return getRowDTO().getContent().isDataSource() diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/actions/ActionContext.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/actions/ActionContext.java index 19484aad52..bc497393e7 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/actions/ActionContext.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/actions/ActionContext.java @@ -203,10 +203,6 @@ public interface ActionContext { return Optional.empty(); } - default Optional getContentForFileSearchAction() { - return Optional.empty(); - } - default Optional getDataSourceForActions() { return Optional.empty(); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/actions/ActionsFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/actions/ActionsFactory.java index 4191996ab3..d670c877ae 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/actions/ActionsFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/actions/ActionsFactory.java @@ -406,15 +406,10 @@ public final class ActionsFactory { 
"ActionFactory_openFileSearchByAttr_text=Open File Search by Attributes" }) private static Optional getRunIngestAction(ActionContext context) { - ActionGroup group = new ActionGroup(); - Optional optional = context.getContentForFileSearchAction(); - + ActionGroup group = new ActionGroup(); + Optional optional = context.getDataSourceForActions(); if(optional.isPresent()) { group.add(new FileSearchAction(Bundle.ActionFactory_openFileSearchByAttr_text(), optional.get().getId())); - } - - optional = context.getDataSourceForActions(); - if(optional.isPresent()) { group.add(new ViewSummaryInformationAction(optional.get().getId())); group.add(new RunIngestModulesAction(Collections.singletonList(optional.get()))); group.add(new DeleteDataSourceAction(optional.get().getId())); From be0b21a90fc1e6bb887e70fda7908ce7909ab489 Mon Sep 17 00:00:00 2001 From: apriestman Date: Mon, 22 Nov 2021 09:24:02 -0500 Subject: [PATCH 066/142] Fix volume parameter for ExtractUnallocAction --- .../sleuthkit/autopsy/directorytree/ExtractUnallocAction.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/ExtractUnallocAction.java b/Core/src/org/sleuthkit/autopsy/directorytree/ExtractUnallocAction.java index a29f5c39ef..5dcd824437 100644 --- a/Core/src/org/sleuthkit/autopsy/directorytree/ExtractUnallocAction.java +++ b/Core/src/org/sleuthkit/autopsy/directorytree/ExtractUnallocAction.java @@ -104,7 +104,7 @@ final class ExtractUnallocAction extends AbstractAction { ExtractUnallocAction(String title, Image image, Volume volume) { super(title); - this.volume = null; + this.volume = volume; this.image = image; chooserFactory = new JFileChooserFactory(CustomFileChooser.class); From e091df5788253e38e5289ef54ae1d05ceb69e6c3 Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Mon, 22 Nov 2021 11:39:49 -0500 Subject: [PATCH 067/142] 8046 logging the Dsp error messages which are displayed --- 
.../casemodule/AddImageWizardAddingProgressPanel.java | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardAddingProgressPanel.java b/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardAddingProgressPanel.java index f0753d70bd..8acde0ab57 100644 --- a/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardAddingProgressPanel.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardAddingProgressPanel.java @@ -425,9 +425,14 @@ class AddImageWizardAddingProgressPanel extends ShortcutWizardDescriptorPanel { // TBD: there probably should be an error level for each error addErrors(err, critErr); } - - //notify the UI of the new content added to the case + final Level level = critErr ? Level.SEVERE : Level.WARNING; new Thread(() -> { + //log error messages as Severe if there was a critical error otherwise as Warning. + //logging performed off of UI thread + for (String err : errList) { + Logger.getLogger(AddImageWizardAddingProgressVisual.class.getName()).log(level, err); + } + //notify the UI of the new content added to the case try { if (!contents.isEmpty()) { Case.getCurrentCaseThrows().notifyDataSourceAdded(contents.get(0), dataSourceId); From 6e044cbfc2b22429aa9b9a8526401b86dc05812c Mon Sep 17 00:00:00 2001 From: apriestman Date: Mon, 22 Nov 2021 11:43:35 -0500 Subject: [PATCH 068/142] Refactored extract action --- .../autopsy/mainui/nodes/ArtifactNode.java | 2 +- .../autopsy/mainui/nodes/DirectoryNode.java | 2 +- .../autopsy/mainui/nodes/FileNode.java | 2 +- .../mainui/nodes/FileSystemFactory.java | 6 ++--- .../mainui/nodes/SpecialDirectoryNode.java | 2 +- .../mainui/nodes/actions/ActionContext.java | 15 ++++++++++-- .../mainui/nodes/actions/ActionsFactory.java | 23 +++++++++++++++---- 7 files changed, 39 insertions(+), 13 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/ArtifactNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/ArtifactNode.java 
index e86c576d13..0dc993c8ed 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/ArtifactNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/ArtifactNode.java @@ -125,7 +125,7 @@ public abstract class ArtifactNode { } @Override - public boolean supportsExtractActions() { + public boolean supportsTableExtractActions() { return true; } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileNode.java index c508c5e0df..8c814aae98 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileNode.java @@ -108,7 +108,7 @@ public class FileNode extends AbstractNode implements ActionContext { } @Override - public boolean supportsExtractActions() { + public boolean supportsTableExtractActions() { return true; } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileSystemFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileSystemFactory.java index bacff3f8ae..5cd4edba0e 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileSystemFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileSystemFactory.java @@ -382,7 +382,7 @@ public class FileSystemFactory extends TreeChildFactory Date: Mon, 22 Nov 2021 11:53:28 -0500 Subject: [PATCH 069/142] 8046 slightly more helpfull log message --- .../autopsy/casemodule/AddImageWizardAddingProgressPanel.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardAddingProgressPanel.java b/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardAddingProgressPanel.java index 8acde0ab57..58b1bb5a56 100644 --- a/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardAddingProgressPanel.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardAddingProgressPanel.java @@ -430,7 +430,7 @@ class AddImageWizardAddingProgressPanel extends ShortcutWizardDescriptorPanel { //log error messages as 
Severe if there was a critical error otherwise as Warning. //logging performed off of UI thread for (String err : errList) { - Logger.getLogger(AddImageWizardAddingProgressVisual.class.getName()).log(level, err); + Logger.getLogger(AddImageWizardAddingProgressVisual.class.getName()).log(level, "DatasourceID: {0} Error Message: {1}", new Object[]{dataSourceId.toString(), err}); } //notify the UI of the new content added to the case try { From 7f8524b3210bbedb1e788cc781ed4775f82e7426 Mon Sep 17 00:00:00 2001 From: apriestman Date: Mon, 22 Nov 2021 15:17:46 -0500 Subject: [PATCH 070/142] Cleanup --- .../RunIngestModulesAction.java | 2 +- .../datamodel/FileSystemColumnUtils.java | 46 +++- .../mainui/datamodel/FileSystemDAO.java | 42 +--- .../mainui/nodes/FileSystemFactory.java | 208 +++++++++--------- .../mainui/nodes/actions/ActionsFactory.java | 10 +- 5 files changed, 152 insertions(+), 156 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/runIngestModuleWizard/RunIngestModulesAction.java b/Core/src/org/sleuthkit/autopsy/ingest/runIngestModuleWizard/RunIngestModulesAction.java index 4698b68006..a6721e0ab2 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/runIngestModuleWizard/RunIngestModulesAction.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/runIngestModuleWizard/RunIngestModulesAction.java @@ -49,7 +49,7 @@ public final class RunIngestModulesAction extends AbstractAction { @Messages("RunIngestModulesAction.name=Run Ingest Modules") private static final long serialVersionUID = 1L; - private static final Logger logger = Logger.getLogger(SpecialDirectoryNode.class.getName()); + private static final Logger logger = Logger.getLogger(RunIngestModulesAction.class.getName()); /* * Note that the execution context is the name of the dialog that used to be diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemColumnUtils.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemColumnUtils.java index 6efd60cf2a..2dfd75254c 100644 --- 
a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemColumnUtils.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemColumnUtils.java @@ -23,6 +23,7 @@ import java.sql.SQLException; import java.util.Arrays; import java.util.ArrayList; import java.util.Collections; +import java.util.Comparator; import java.util.Iterator; import java.util.List; import java.util.logging.Level; @@ -536,7 +537,17 @@ public class FileSystemColumnUtils { return getDisplayableContentForTableAndTree(content); } - static List getDisplayableContentForTableAndTree(Content content) throws TskCoreException { + /** + * Get the displayable content children in common between the table and tree views. + * Advances past content types we do not display (volume systems, file systems, root folder). + * + * @param content The content to get the children of. + * + * @return List of displayable content children. + * + * @throws TskCoreException + */ + private static List getDisplayableContentForTableAndTree(Content content) throws TskCoreException { // If the given content is displayable, return it if (FileSystemColumnUtils.isDisplayable(content)) { return Arrays.asList(content); @@ -586,18 +597,25 @@ public class FileSystemColumnUtils { return new ColumnKey(name, name, Bundle.FileSystemColumnUtils_noDescription()); } + /** + * Get the children of a given content ID that will be visible in the tree. + * + * @param contentId The ID of the parent content. + * + * @return The visible children of the given content. 
+ * + * @throws TskCoreException + * @throws NoCurrentCaseException + */ public static List getVisibleTreeNodeChildren(Long contentId) throws TskCoreException, NoCurrentCaseException { SleuthkitCase skCase = Case.getCurrentCaseThrows().getSleuthkitCase(); Content content = skCase.getContentById(contentId); - System.out.println("### getting displayable children for " + content.getClass().getSimpleName() - + " with ID " + contentId); List originalChildren = content.getChildren(); - // TODO so much filtering // First, advance past anything we don't display (volume systems, file systems, root folders) List treeChildren = new ArrayList<>(); for (Content child : originalChildren) { - treeChildren.addAll(FileSystemColumnUtils.getDisplayableContentForTableAndTree(child)); // TODO known, slack + treeChildren.addAll(FileSystemColumnUtils.getDisplayableContentForTableAndTree(child)); } // Filter out the . and .. directories @@ -615,20 +633,26 @@ public class FileSystemColumnUtils { iter.remove(); } } - - // sort? maybe sort earlier... + return treeChildren; } - private static boolean hasDisplayableContentChildren(AbstractFile file) { + /** + * Check whether a file has displayable children. + * + * @param file The file to check. + * + * @return True if the file has displayable children, false otherwise. + */ + private static boolean hasDisplayableContentChildren(AbstractFile file) { if (file != null) { try { + // If the file has no children at all, then it has no displayable children. 
if (!file.hasChildren()) { return false; } } catch (TskCoreException ex) { - - //logger.log(Level.SEVERE, "Error checking if the node has children, for content: " + c, ex); //NON-NLS + logger.log(Level.SEVERE, "Error checking if the node has children for file with ID: " + file.getId(), ex); //NON-NLS return false; } @@ -647,7 +671,7 @@ public class FileSystemColumnUtils { return (0 < resultSet.getInt("count")); } } catch (TskCoreException | SQLException | NoCurrentCaseException ex) { - //logger.log(Level.SEVERE, "Error checking if the node has children, for content: " + c, ex); //NON-NLS + logger.log(Level.SEVERE, "Error checking if the node has children for file with ID: " + file.getId(), ex); //NON-NLS } } return false; diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java index 214633ae67..41b3866b9d 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java @@ -21,24 +21,18 @@ package org.sleuthkit.autopsy.mainui.datamodel; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import java.beans.PropertyChangeEvent; -import java.sql.ResultSet; -import java.sql.SQLException; import java.util.ArrayList; +import java.util.Collections; import java.util.Comparator; -import java.util.Iterator; import java.util.List; -import java.util.Map; +import java.util.Objects; import java.util.Optional; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; -import java.util.logging.Level; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; -import org.sleuthkit.autopsy.datamodel.ContentUtils; -import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import 
org.sleuthkit.autopsy.mainui.datamodel.FileSystemRowDTO.DirectoryRowDTO; import org.sleuthkit.autopsy.mainui.datamodel.FileSystemRowDTO.ImageRowDTO; import org.sleuthkit.autopsy.mainui.datamodel.FileSystemRowDTO.VolumeRowDTO; @@ -51,7 +45,6 @@ import org.sleuthkit.autopsy.mainui.datamodel.FileSystemRowDTO.PoolRowDTO; import static org.sleuthkit.autopsy.mainui.datamodel.MediaTypeUtils.getExtensionMediaType; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.DataSource; import org.sleuthkit.datamodel.Directory; @@ -312,9 +305,7 @@ public class FileSystemDAO { ds.getName(), null )); - // TODO sort } - return new TreeResultsDTO<>(treeItemRows); } catch (NoCurrentCaseException | TskCoreException ex) { throw new ExecutionException("An error occurred while fetching images for host with ID " + host.getHostId(), ex); @@ -340,11 +331,11 @@ public class FileSystemDAO { } /** - * TODO + * Get the children that will be displayed in the tree for a given content ID. * * @param contentId Object ID of parent content. * - * @return + * @return The results. * * @throws ExecutionException */ @@ -358,12 +349,8 @@ public class FileSystemDAO { Long countForNode = null; if ((child instanceof AbstractFile) && ! 
(child instanceof LocalFilesDataSource)) { - countForNode = new Long(child.getChildrenCount()); // TODO probably not correct + countForNode = new Long(child.getChildrenCount()); // TODO does not account for hidden children } - // public TreeItemDTO(String typeId, T typeData, Object id, String displayName, Long count) { - System.out.println("### Creating TreeItemDTO for " + child.getClass().getSimpleName() - + " child with name: " + child.getName() - + " and ID: " + child.getId()); treeItemRows.add(new TreeResultsDTO.TreeItemDTO<>( child.getClass().getSimpleName(), new FileSystemContentSearchParam(child.getId()), @@ -371,24 +358,7 @@ public class FileSystemDAO { getNameForContent(child), countForNode )); - // TODO sort } - - // get row dto's sorted by display name - //Map typeCounts = getCounts(BlackboardArtifact.Category.DATA_ARTIFACT, dataSourceId); - //List> treeItemRows = typeCounts.entrySet().stream() - // .map(entry -> { - // return new TreeResultsDTO.TreeItemDTO<>( - // BlackboardArtifact.Category.DATA_ARTIFACT.name(), - // new DataArtifactSearchParam(entry.getKey(), dataSourceId), - // entry.getKey().getTypeID(), - // entry.getKey().getDisplayName(), - // entry.getValue()); - // }) - // .sorted(Comparator.comparing(countRow -> countRow.getDisplayName())) - // .collect(Collectors.toList()); - - // return results return new TreeResultsDTO<>(treeItemRows); } catch (NoCurrentCaseException | TskCoreException ex) { @@ -400,6 +370,8 @@ public class FileSystemDAO { // Currently the only special case is for volumes if (content instanceof Volume) { return FileSystemColumnUtils.getVolumeDisplayName((Volume)content); + } else if (content instanceof AbstractFile) { + return FileSystemColumnUtils.convertDotDirName((AbstractFile) content); } return content.getName(); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileSystemFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileSystemFactory.java index 5cd4edba0e..32e1081f27 100644 --- 
a/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileSystemFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileSystemFactory.java @@ -23,15 +23,15 @@ import java.util.Optional; import org.openide.nodes.Children; import org.openide.nodes.Node; import java.util.concurrent.ExecutionException; +import java.util.logging.Level; import javax.swing.Action; -import org.openide.nodes.ChildFactory; import org.openide.util.Lookup; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent; +import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.datamodel.FileTypeExtensions; -import org.sleuthkit.autopsy.datamodel.utils.IconsUtil; import org.sleuthkit.autopsy.directorytree.ExtractUnallocAction; import org.sleuthkit.autopsy.directorytree.FileSystemDetailsAction; import org.sleuthkit.autopsy.ingest.IngestManager; @@ -44,46 +44,50 @@ import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO; import static org.sleuthkit.autopsy.mainui.nodes.NodeIconUtil.CARVED_FILE; import static org.sleuthkit.autopsy.mainui.nodes.NodeIconUtil.DELETED_FILE; import static org.sleuthkit.autopsy.mainui.nodes.NodeIconUtil.DELETED_FOLDER; -import static org.sleuthkit.autopsy.mainui.nodes.NodeIconUtil.FILE; import static org.sleuthkit.autopsy.mainui.nodes.NodeIconUtil.FOLDER; import static org.sleuthkit.autopsy.mainui.nodes.TreeNode.getDefaultLookup; import org.sleuthkit.autopsy.mainui.nodes.actions.ActionContext; import org.sleuthkit.autopsy.mainui.nodes.actions.ActionsFactory; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.DataSource; import org.sleuthkit.datamodel.Host; import org.sleuthkit.datamodel.Image; import org.sleuthkit.datamodel.LocalDirectory; import 
org.sleuthkit.datamodel.LocalFilesDataSource; -import org.sleuthkit.datamodel.VirtualDirectory; -import org.sleuthkit.datamodel.Volume; import org.sleuthkit.datamodel.Pool; -import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskDataException; import org.sleuthkit.datamodel.TskData; +import org.sleuthkit.datamodel.VirtualDirectory; +import org.sleuthkit.datamodel.Volume; /** * Factory for displaying content in the data source section of the tree. */ public class FileSystemFactory extends TreeChildFactory { + private static final Logger logger = Logger.getLogger(FileSystemFactory.class.getName()); + private Long contentId = null; private Host host = null; /** - * Main constructor. + * Create a factory for a given parent content ID. * * @param contentId The object ID for this node */ public FileSystemFactory(Long contentId) { - System.out.println("### Creating FileSystemFactory with content ID: " + contentId); this.contentId = contentId; } - + + /** + * Create a factory for a given parent Host. 
+ * + * @param host The parent host for this node + */ public FileSystemFactory(Host host) { - System.out.println("### Creating FileSystemFactory with host ID: " + host.getHostId()); this.host = host; } @@ -91,11 +95,9 @@ public class FileSystemFactory extends TreeChildFactory getChildResults() throws IllegalArgumentException, ExecutionException { if (host == null) { TreeResultsDTO results = MainDAO.getInstance().getFileSystemDAO().getDisplayableContentChildren(contentId); - System.out.println("### getChildResults() for id: " + contentId + " has " + results.getItems().size() + " rows"); return results; } else { TreeResultsDTO results = MainDAO.getInstance().getFileSystemDAO().getDataSourcesForHost(host); - System.out.println("### getChildResults() for host: " + host.getName() + " has " + results.getItems().size() + " rows"); return results; } } @@ -109,7 +111,7 @@ public class FileSystemFactory extends TreeChildFactory { + private final long dataSourceId; - + + /** + * Create the factory for a given data source object ID. + * + * @param dataSourceId The data source object ID. + */ public DataSourceFactory(long dataSourceId) { - System.out.println("### Creating DataSourceFactory with dataSourceId: " + dataSourceId); this.dataSourceId = dataSourceId; } - + @Override protected TreeResultsDTO getChildResults() throws IllegalArgumentException, ExecutionException { // We're not really getting children here, just creating a node for the data source itself. 
return MainDAO.getInstance().getFileSystemDAO().getSingleDataSource(dataSourceId); } - + @Override protected TreeNode createNewNode(TreeResultsDTO.TreeItemDTO rowData) { - try { + try { DataSource ds = Case.getCurrentCaseThrows().getSleuthkitCase().getDataSource(dataSourceId); if (ds instanceof Image) { return new ImageTreeNode((Image) ds, rowData); } else if (ds instanceof LocalFilesDataSource) { return new LocalFilesDataSourceTreeNode((LocalFilesDataSource) ds, rowData); } else { - // There shouldn't be any other type - // TODO log + logger.log(Level.SEVERE, "Unexpected data source type (ID: {0})", dataSourceId); return null; } - } catch (NoCurrentCaseException | TskCoreException | TskDataException ex) { - // TODO log + } catch (NoCurrentCaseException ex) { + // Case is likely closing + return null; + } catch (TskCoreException | TskDataException ex) { + logger.log(Level.SEVERE, "Error creating node from data source with ID: " + dataSourceId, ex); return null; } } - - + @Override public boolean isRefreshRequired(PropertyChangeEvent evt) { // TODO return false; } - + } /** * Display name and count of a file system node in the tree. 
*/ @NbBundle.Messages({ - "FileSystemFactory.FileSystemTreeNode.ExtractUnallocAction.text=Extract Unallocated Space to Single Files"}) + "FileSystemFactory.FileSystemTreeNode.ExtractUnallocAction.text=Extract Unallocated Space to Single Files"}) public abstract static class FileSystemTreeNode extends TreeNode implements ActionContext { - protected FileSystemTreeNode(String nodeName, String icon, TreeResultsDTO.TreeItemDTO itemData, Children children, Lookup lookup) { super(nodeName, icon, itemData, children, lookup); - //super(nodeName, "org/sleuthkit/autopsy/images/bank.png", itemData, children, lookup); } - + protected static Children createChildrenForContent(Long contentId) { try { if (FileSystemColumnUtils.getVisibleTreeNodeChildren(contentId).isEmpty()) { @@ -231,8 +238,10 @@ public class FileSystemFactory extends TreeChildFactory itemData) { super(itemData.getDisplayName(), NodeIconUtil.IMAGE.getPath(), @@ -260,13 +270,12 @@ public class FileSystemFactory extends TreeChildFactory getNodeSpecificActions() { ActionsFactory.ActionGroup group = new ActionsFactory.ActionGroup(); @@ -280,11 +289,6 @@ public class FileSystemFactory extends TreeChildFactory getNewWindowActionNode() { - return Optional.of(this); - } - @Override public boolean supportsSourceContentViewerActions() { return true; @@ -292,8 +296,9 @@ public class FileSystemFactory extends TreeChildFactory itemData) { super(FileSystemColumnUtils.getVolumeDisplayName(volume), NodeIconUtil.VOLUME.getPath(), @@ -301,13 +306,12 @@ public class FileSystemFactory extends TreeChildFactory getNodeSpecificActions() { ActionsFactory.ActionGroup group = new ActionsFactory.ActionGroup(); @@ -317,11 +321,6 @@ public class FileSystemFactory extends TreeChildFactory getNewWindowActionNode() { - return Optional.of(this); - } - @Override public boolean supportsSourceContentViewerActions() { return true; @@ -329,8 +328,9 @@ public class FileSystemFactory extends TreeChildFactory itemData) { super(itemData.getDisplayName(), 
NodeIconUtil.VOLUME.getPath(), @@ -338,17 +338,17 @@ public class FileSystemFactory extends TreeChildFactory itemData) { super(itemData.getDisplayName(), getDirectoryIcon(dir), @@ -356,9 +356,8 @@ public class FileSystemFactory extends TreeChildFactory itemData, Children children, Lookup lookup) { super(nodeName, icon, itemData, children, lookup); this.dir = dir; } - + @Override public boolean supportsSourceContentViewerActions() { return true; } - @Override - public Optional getNewWindowActionNode() { - return Optional.of(this); - } - @Override public boolean supportsTreeExtractActions() { return true; @@ -425,30 +420,30 @@ public class FileSystemFactory extends TreeChildFactory itemData) { - super(dir, + super(dir, itemData.getDisplayName(), NodeIconUtil.FOLDER.getPath(), itemData, createChildrenForContent(itemData.getTypeData().getContentObjectId()), ContentNodeUtil.getLookup(dir)); - System.out.println("### LocalDirectoryTreeNode - name: " + itemData.getDisplayName() + ", contentId: " + itemData.getTypeData().getContentObjectId()); } - + public Node clone() { - return new DirectoryTreeNode(dir, getItemData()); + return new LocalDirectoryTreeNode(dir, getItemData()); } - + @Override public boolean supportsContentTagAction() { return true; } } - + static class LocalFilesDataSourceTreeNode extends SpecialDirectoryTreeNode { - + LocalFilesDataSourceTreeNode(AbstractFile localFilesDataSource, TreeResultsDTO.TreeItemDTO itemData) { super(localFilesDataSource, itemData.getDisplayName(), @@ -456,21 +451,20 @@ public class FileSystemFactory extends TreeChildFactory getDataSourceForActions() { return Optional.of(dir); } - } - + } + static class VirtualDirectoryTreeNode extends SpecialDirectoryTreeNode { - + VirtualDirectoryTreeNode(AbstractFile dir, TreeResultsDTO.TreeItemDTO itemData) { super(dir, itemData.getDisplayName(), @@ -478,17 +472,17 @@ public class FileSystemFactory extends TreeChildFactory itemData) { super(itemData.getDisplayName(), getFileIcon(file), @@ 
-496,13 +490,12 @@ public class FileSystemFactory extends TreeChildFactory getFileForDirectoryBrowseMode() { - // TODO What is this? return Optional.of(file); } @@ -566,18 +558,19 @@ public class FileSystemFactory extends TreeChildFactory 0; } catch (TskCoreException ex) { - // TODO + logger.log(Level.SEVERE, "Error loading artifacts for file with ID: " + file.getId(), ex); } return encryptionDetected ? Optional.of(file) : Optional.empty(); } } - + @NbBundle.Messages({ "FileSystemFactory.UnsupportedTreeNode.displayName=Unsupported Content",}) static class UnsupportedTreeNode extends FileSystemTreeNode { + Content content; - + UnsupportedTreeNode(Content content, TreeResultsDTO.TreeItemDTO itemData) { super(Bundle.FileSystemFactory_UnsupportedTreeNode_displayName(), NodeIconUtil.FILE.getPath(), @@ -585,9 +578,8 @@ public class FileSystemFactory extends TreeChildFactorysingletonList(optional.get()))); + if (optional.get() instanceof AbstractFile) { + group.add(new RunIngestModulesAction((AbstractFile)optional.get())); + } else { + logger.log(Level.WARNING, "Can not create RunIngestModulesAction on non-AbstractFile content with ID " + optional.get().getId()); + } } } From 71fd1f7619ab17cd34f00ade441e1e432ba4aa49 Mon Sep 17 00:00:00 2001 From: Kelly Kelly Date: Mon, 22 Nov 2021 15:54:08 -0500 Subject: [PATCH 071/142] Added perisistance for new nodes --- .../corecomponents/DataResultViewerTable.java | 109 +++++++++++++----- .../ResultViewerPersistence.java | 21 ++++ .../AnalysisResultTableSearchResultsDTO.java | 3 +- .../datamodel/BaseSearchResultsDTO.java | 13 ++- .../mainui/datamodel/Bundle.properties-MERGED | 2 + .../DataArtifactTableSearchResultsDTO.java | 3 +- .../mainui/datamodel/FileSystemDAO.java | 4 +- .../mainui/datamodel/OsAccountsDAO.java | 2 +- .../mainui/datamodel/SearchResultsDTO.java | 2 + .../autopsy/mainui/datamodel/TagsDAO.java | 4 +- .../autopsy/mainui/datamodel/ViewsDAO.java | 2 +- 11 files changed, 126 insertions(+), 39 deletions(-) diff --git 
a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerTable.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerTable.java index f844adccb8..dffd0631ce 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerTable.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerTable.java @@ -528,13 +528,15 @@ public class DataResultViewerTable extends AbstractDataResultViewer { if (rootNode == null || propertiesMap.isEmpty()) { return; } - if (rootNode instanceof TableFilterNode) { - TableFilterNode tfn = (TableFilterNode) rootNode; + if (rootNode instanceof TableFilterNode || searchResults != null) { + TableFilterNode tfn = searchResults == null ? (TableFilterNode) rootNode : null; final Preferences preferences = NbPreferences.forModule(DataResultViewerTable.class); final ETableColumnModel columnModel = (ETableColumnModel) outline.getColumnModel(); for (Map.Entry entry : columnMap.entrySet()) { String columnName = entry.getKey(); - final String columnHiddenKey = ResultViewerPersistence.getColumnHiddenKey(tfn, columnName); + final String columnHiddenKey = + tfn != null ? ResultViewerPersistence.getColumnHiddenKey(tfn, columnName) : + ResultViewerPersistence.getColumnHiddenKey(searchResults, columnName); final TableColumn column = entry.getValue(); boolean columnHidden = columnModel.isColumnHidden(column); if (columnHidden) { @@ -554,12 +556,14 @@ public class DataResultViewerTable extends AbstractDataResultViewer { if (rootNode == null || propertiesMap.isEmpty()) { return; } - if (rootNode instanceof TableFilterNode) { - TableFilterNode tfn = (TableFilterNode) rootNode; + if (rootNode instanceof TableFilterNode || searchResults != null) { + TableFilterNode tfn = searchResults == null ? 
(TableFilterNode) rootNode : null; final Preferences preferences = NbPreferences.forModule(DataResultViewerTable.class); // Store the current order of the columns into settings for (Map.Entry> entry : propertiesMap.entrySet()) { - preferences.putInt(ResultViewerPersistence.getColumnPositionKey(tfn, entry.getValue().getName()), entry.getKey()); + preferences.putInt(tfn != null ? + ResultViewerPersistence.getColumnPositionKey(tfn, entry.getValue().getName()) : + ResultViewerPersistence.getColumnPositionKey(searchResults, entry.getValue().getName()), entry.getKey()); } } } @@ -571,16 +575,20 @@ public class DataResultViewerTable extends AbstractDataResultViewer { if (rootNode == null || propertiesMap.isEmpty()) { return; } - if (rootNode instanceof TableFilterNode) { - final TableFilterNode tfn = ((TableFilterNode) rootNode); + if (rootNode instanceof TableFilterNode || searchResults != null) { + final TableFilterNode tfn = searchResults == null ? ((TableFilterNode) rootNode) : null; final Preferences preferences = NbPreferences.forModule(DataResultViewerTable.class); ETableColumnModel columnModel = (ETableColumnModel) outline.getColumnModel(); for (Map.Entry entry : columnMap.entrySet()) { ETableColumn etc = entry.getValue(); String columnName = entry.getKey(); //store sort rank and order - final String columnSortOrderKey = ResultViewerPersistence.getColumnSortOrderKey(tfn, columnName); - final String columnSortRankKey = ResultViewerPersistence.getColumnSortRankKey(tfn, columnName); + final String columnSortOrderKey = + searchResults == null ? ResultViewerPersistence.getColumnSortOrderKey(tfn, columnName) : + ResultViewerPersistence.getColumnSortOrderKey(searchResults, columnName); + final String columnSortRankKey = + searchResults == null ? 
ResultViewerPersistence.getColumnSortRankKey(tfn, columnName): + ResultViewerPersistence.getColumnSortRankKey(searchResults, columnName); if (etc.isSorted() && (columnModel.isColumnHidden(etc) == false)) { preferences.putBoolean(columnSortOrderKey, etc.isAscending()); preferences.putInt(columnSortRankKey, etc.getSortRank()); @@ -590,7 +598,7 @@ public class DataResultViewerTable extends AbstractDataResultViewer { preferences.remove(columnSortRankKey); } } - } + } } /** @@ -603,17 +611,23 @@ public class DataResultViewerTable extends AbstractDataResultViewer { if (rootNode == null || propertiesMap.isEmpty()) { return; } - if (rootNode instanceof TableFilterNode) { - final TableFilterNode tfn = (TableFilterNode) rootNode; + if (rootNode instanceof TableFilterNode || searchResults != null) { + final TableFilterNode tfn = (searchResults == null ? (TableFilterNode) rootNode : null); final Preferences preferences = NbPreferences.forModule(DataResultViewerTable.class); //organize property sorting information, sorted by rank TreeSet sortInfos = new TreeSet<>(Comparator.comparing(ColumnSortInfo::getRank)); propertiesMap.entrySet().stream().forEach(entry -> { final String propName = entry.getValue().getName(); //if the sort rank is undefined, it will be defaulted to 0 => unsorted. - Integer sortRank = preferences.getInt(ResultViewerPersistence.getColumnSortRankKey(tfn, propName), 0); + Integer sortRank = preferences.getInt( + tfn != null ? + ResultViewerPersistence.getColumnSortRankKey(tfn, propName) : + ResultViewerPersistence.getColumnSortRankKey(searchResults, propName), 0); //default to true => ascending - Boolean sortOrder = preferences.getBoolean(ResultViewerPersistence.getColumnSortOrderKey(tfn, propName), true); + Boolean sortOrder = preferences.getBoolean( + tfn != null ? 
+ ResultViewerPersistence.getColumnSortOrderKey(tfn, propName) : + ResultViewerPersistence.getColumnSortOrderKey(searchResults, propName), true); sortInfos.add(new ColumnSortInfo(entry.getKey(), sortRank, sortOrder)); }); //apply sort information in rank order. @@ -629,13 +643,16 @@ public class DataResultViewerTable extends AbstractDataResultViewer { if (rootNode == null || propertiesMap.isEmpty()) { return; } - if (rootNode instanceof TableFilterNode) { + if (rootNode instanceof TableFilterNode || searchResults != null) { final Preferences preferences = NbPreferences.forModule(DataResultViewerTable.class); - final TableFilterNode tfn = ((TableFilterNode) rootNode); + final TableFilterNode tfn = (searchResults == null ? ((TableFilterNode) rootNode) : null); ETableColumnModel columnModel = (ETableColumnModel) outline.getColumnModel(); for (Map.Entry> entry : propertiesMap.entrySet()) { final String propName = entry.getValue().getName(); - boolean hidden = preferences.getBoolean(ResultViewerPersistence.getColumnHiddenKey(tfn, propName), false); + boolean hidden = preferences.getBoolean( + tfn != null ? 
+ ResultViewerPersistence.getColumnHiddenKey(tfn, propName) : + ResultViewerPersistence.getColumnHiddenKey(searchResults, propName), false); final TableColumn column = columnMap.get(propName); columnModel.setColumnHidden(column, hidden); } @@ -653,16 +670,7 @@ public class DataResultViewerTable extends AbstractDataResultViewer { private synchronized List> loadColumnOrder() { if (searchResults != null) { - return searchResults.getColumns().stream() - .map(columnKey -> { - return new NodeProperty<>( - columnKey.getFieldName(), - columnKey.getDisplayName(), - columnKey.getDescription(), - "" - ); - }) - .collect(Collectors.toList()); + return loadColumnOrderForSearchResults(); } List> props = ResultViewerPersistence.getAllChildProperties(rootNode, 100); @@ -705,6 +713,51 @@ public class DataResultViewerTable extends AbstractDataResultViewer { return new ArrayList<>(propertiesMap.values()); } + + private synchronized List> loadColumnOrderForSearchResults() { + List> props = searchResults.getColumns().stream() + .map(columnKey -> { + return new NodeProperty<>( + columnKey.getFieldName(), + columnKey.getDisplayName(), + columnKey.getDescription(), + "" + ); + }) + .collect(Collectors.toList()); + + propertiesMap.clear(); + + /* + * We load column index values into the properties map. If a property's + * index is outside the range of the number of properties or the index + * has already appeared as the position of another property, we put that + * property at the end. 
+ */ + int offset = props.size(); + + final Preferences preferences = NbPreferences.forModule(DataResultViewerTable.class); + + for (Property prop : props) { + Integer value = preferences.getInt(ResultViewerPersistence.getColumnPositionKey(searchResults, prop.getName()), -1); + if (value >= 0 && value < offset && !propertiesMap.containsKey(value)) { + propertiesMap.put(value, prop); + } else { + propertiesMap.put(offset, prop); + offset++; + } + } + + /* + * NOTE: it is possible to have "discontinuities" in the keys (i.e. + * column numbers) of the map. This happens when some of the columns had + * a previous setting, and other columns did not. We need to make the + * keys 0-indexed and continuous. + */ + compactPropertiesMap(); + + return new ArrayList<>(propertiesMap.values()); + } /** * Makes properties map 0-indexed and re-arranges elements to make sure the diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/ResultViewerPersistence.java b/Core/src/org/sleuthkit/autopsy/corecomponents/ResultViewerPersistence.java index a981b04112..645bfaadaa 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/ResultViewerPersistence.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/ResultViewerPersistence.java @@ -28,6 +28,7 @@ import javax.swing.SortOrder; import org.openide.nodes.Children; import org.openide.nodes.Node; import org.openide.util.NbPreferences; +import org.sleuthkit.autopsy.mainui.datamodel.SearchResultsDTO; final class ResultViewerPersistence { @@ -46,6 +47,10 @@ final class ResultViewerPersistence { static String getColumnPositionKey(TableFilterNode node, String propName) { return getColumnKeyBase(node, propName) + ".column"; } + + static String getColumnPositionKey(SearchResultsDTO searchResult, String propName) { + return getColumnKeyBase(searchResult, propName) + ".column"; + } /** * Gets a key for the given node and a property of its child nodes to store @@ -59,6 +64,10 @@ final class ResultViewerPersistence { static String 
getColumnSortOrderKey(TableFilterNode node, String propName) { return getColumnKeyBase(node, propName) + ".sortOrder"; } + + static String getColumnSortOrderKey(SearchResultsDTO searchResult, String propName) { + return getColumnKeyBase(searchResult, propName) + ".sortOrder"; + } /** * Gets a key for the given node and a property of its child nodes to store @@ -72,6 +81,10 @@ final class ResultViewerPersistence { static String getColumnSortRankKey(TableFilterNode node, String propName) { return getColumnKeyBase(node, propName) + ".sortRank"; } + + static String getColumnSortRankKey(SearchResultsDTO searchResult, String propName) { + return getColumnKeyBase(searchResult, propName) + ".sortRank"; + } /** * Gets a key for the given node and a property of its child nodes to store @@ -85,10 +98,18 @@ final class ResultViewerPersistence { static String getColumnHiddenKey(TableFilterNode node, String propName) { return getColumnKeyBase(node, propName) + ".hidden"; } + + static String getColumnHiddenKey(SearchResultsDTO searchResult, String propName) { + return getColumnKeyBase(searchResult, propName) + ".hidden"; + } private static String getColumnKeyBase(TableFilterNode node, String propName) { return stripNonAlphanumeric(node.getColumnOrderKey()) + "." + stripNonAlphanumeric(propName); } + + private static String getColumnKeyBase(SearchResultsDTO searchResult, String propName) { + return stripNonAlphanumeric(searchResult.getSignature()) + "." 
+ stripNonAlphanumeric(propName); + } private static String stripNonAlphanumeric(String str) { return str.replaceAll("[^a-zA-Z0-9_]", ""); diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultTableSearchResultsDTO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultTableSearchResultsDTO.java index 300d8004a0..de243f83ec 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultTableSearchResultsDTO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultTableSearchResultsDTO.java @@ -27,11 +27,12 @@ import org.sleuthkit.datamodel.BlackboardArtifact; public class AnalysisResultTableSearchResultsDTO extends BaseSearchResultsDTO { private static final String TYPE_ID = "ANALYSIS_RESULT"; + private static final String SIGNATURE = "analysisresult"; private final BlackboardArtifact.Type artifactType; public AnalysisResultTableSearchResultsDTO(BlackboardArtifact.Type artifactType, List columns, List items, long startItem, long totalResultsCount) { - super(TYPE_ID, artifactType.getDisplayName(), columns, items, startItem, totalResultsCount); + super(TYPE_ID, artifactType.getDisplayName(), columns, items, SIGNATURE, startItem, totalResultsCount); this.artifactType = artifactType; } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BaseSearchResultsDTO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BaseSearchResultsDTO.java index 62c3d09410..767a3f15e3 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BaseSearchResultsDTO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BaseSearchResultsDTO.java @@ -31,18 +31,20 @@ public class BaseSearchResultsDTO implements SearchResultsDTO { private final List items; private final long totalResultsCount; private final long startItem; + private final String signature; - public BaseSearchResultsDTO(String typeId, String displayName, List columns, List items) { - this(typeId, displayName, columns, items, 0, items == null ? 
0 : items.size()); + public BaseSearchResultsDTO(String typeId, String displayName, List columns, List items, String signature) { + this(typeId, displayName, columns, items, signature, 0, items == null ? 0 : items.size()); } - public BaseSearchResultsDTO(String typeId, String displayName, List columns, List items, long startItem, long totalResultsCount) { + public BaseSearchResultsDTO(String typeId, String displayName, List columns, List items, String signature, long startItem, long totalResultsCount) { this.typeId = typeId; this.displayName = displayName; this.columns = columns; this.items = items; this.startItem = startItem; this.totalResultsCount = totalResultsCount; + this.signature = signature; } @Override @@ -74,4 +76,9 @@ public class BaseSearchResultsDTO implements SearchResultsDTO { public long getStartItem() { return startItem; } + + @Override + public String getSignature() { + return signature; + } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/Bundle.properties-MERGED index ab71f5d900..e84fa793b3 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/Bundle.properties-MERGED @@ -34,6 +34,8 @@ BlackboardArtifactDAO.columnKeys.score.name=Score BlackboardArtifactDAO.columnKeys.srcFile.description=Source Name BlackboardArtifactDAO.columnKeys.srcFile.displayName=Source Name BlackboardArtifactDAO.columnKeys.srcFile.name=Source Name +CommAccounts.name.text=Communication Accounts +CommAccountsDAO.fileColumns.noDescription=No Description FileExtDocumentFilter_html_displayName=HTML FileExtDocumentFilter_office_displayName=Office FileExtDocumentFilter_pdf_displayName=PDF diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactTableSearchResultsDTO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactTableSearchResultsDTO.java index 0ddb4ce52e..45cb791826 100644 
--- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactTableSearchResultsDTO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactTableSearchResultsDTO.java @@ -27,11 +27,12 @@ import org.sleuthkit.datamodel.BlackboardArtifact; public class DataArtifactTableSearchResultsDTO extends BaseSearchResultsDTO { private static final String TYPE_ID = "DATA_ARTIFACT"; + private static final String SIGNATURE = "dataartifact"; private final BlackboardArtifact.Type artifactType; public DataArtifactTableSearchResultsDTO(BlackboardArtifact.Type artifactType, List columns, List items, long startItem, long totalResultsCount) { - super(TYPE_ID, artifactType.getDisplayName(), columns, items, startItem, totalResultsCount); + super(TYPE_ID, artifactType.getDisplayName(), columns, items, SIGNATURE, startItem, totalResultsCount); this.artifactType = artifactType; } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java index b3fa860248..7dd180127a 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java @@ -174,7 +174,7 @@ public class FileSystemDAO { List cellValues = FileSystemColumnUtils.getCellValuesForHost(host); rows.add(new BaseRowDTO(cellValues, FILE_SYSTEM_TYPE_ID, host.getHostId())); } - return new BaseSearchResultsDTO(FILE_SYSTEM_TYPE_ID, parentName, columnKeys, rows, cacheKey.getStartItem(), hostsForTable.size()); + return new BaseSearchResultsDTO(FILE_SYSTEM_TYPE_ID, parentName, columnKeys, rows, Host.class.getName(), cacheKey.getStartItem(), hostsForTable.size()); } private BaseSearchResultsDTO fetchContentForTable(SearchParams cacheKey, List contentForTable, @@ -237,7 +237,7 @@ public class FileSystemDAO { cellValues)); } } - return new BaseSearchResultsDTO(FILE_SYSTEM_TYPE_ID, parentName, columnKeys, rows, cacheKey.getStartItem(), contentForTable.size()); + 
return new BaseSearchResultsDTO(FILE_SYSTEM_TYPE_ID, parentName, columnKeys, rows, FILE_SYSTEM_TYPE_ID, cacheKey.getStartItem(), contentForTable.size()); } /** diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java index 98b32b6deb..a149c89708 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java @@ -163,7 +163,7 @@ public class OsAccountsDAO { cellValues)); }; - return new BaseSearchResultsDTO(OS_ACCOUNTS_TYPE_ID, Bundle.OsAccounts_name_text(), OS_ACCOUNTS_WITH_SCO_COLUMNS, fileRows, 0, allAccounts.size()); + return new BaseSearchResultsDTO(OS_ACCOUNTS_TYPE_ID, Bundle.OsAccounts_name_text(), OS_ACCOUNTS_WITH_SCO_COLUMNS, fileRows, OS_ACCOUNTS_TYPE_ID, 0, allAccounts.size()); } /** diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/SearchResultsDTO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/SearchResultsDTO.java index 8f9e4b9caa..3d723265df 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/SearchResultsDTO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/SearchResultsDTO.java @@ -37,4 +37,6 @@ public interface SearchResultsDTO { long getTotalResultsCount(); long getStartItem(); + + String getSignature(); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java index bf5e844e72..086f20f706 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java @@ -221,7 +221,7 @@ public class TagsDAO { blackboardTag.getId())); } - return new BaseSearchResultsDTO(BlackboardArtifactTagsRowDTO.getTypeIdForClass(), Bundle.ResultTag_name_text(), RESULT_TAG_COLUMNS, fileRows, 0, allTags.size()); + return new BaseSearchResultsDTO(BlackboardArtifactTagsRowDTO.getTypeIdForClass(), 
Bundle.ResultTag_name_text(), RESULT_TAG_COLUMNS, fileRows, BlackboardArtifactTag.class.getName(), 0, allTags.size()); } private SearchResultsDTO fetchFileTags(SearchParams cacheKey) throws NoCurrentCaseException, TskCoreException { @@ -274,7 +274,7 @@ public class TagsDAO { file.getId())); } - return new BaseSearchResultsDTO(ContentTagsRowDTO.getTypeIdForClass(), Bundle.FileTag_name_text(), FILE_TAG_COLUMNS, fileRows, 0, allTags.size()); + return new BaseSearchResultsDTO(ContentTagsRowDTO.getTypeIdForClass(), Bundle.FileTag_name_text(), FILE_TAG_COLUMNS, fileRows, ContentTag.class.getName(), 0, allTags.size()); } /** diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java index 6f88d47289..44bcc98c0c 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java @@ -639,7 +639,7 @@ public class ViewsDAO { cellValues)); } - return new BaseSearchResultsDTO(FILE_VIEW_EXT_TYPE_ID, displayName, FileSystemColumnUtils.getColumnKeysForAbstractfile(), fileRows, startItem, totalResultsCount); + return new BaseSearchResultsDTO(FILE_VIEW_EXT_TYPE_ID, displayName, FileSystemColumnUtils.getColumnKeysForAbstractfile(), fileRows, AbstractFile.class.getName(), startItem, totalResultsCount); } /** From f13a3620f113acb63d4b79a9f07e28f3897f3704 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Mon, 22 Nov 2021 16:10:08 -0500 Subject: [PATCH 072/142] starting code --- .../corecomponents/DataResultPanel.java | 4 +- .../autopsy/mainui/datamodel/AbstractDAO.java | 19 +-- .../mainui/datamodel/AnalysisResultDAO.java | 8 +- .../mainui/datamodel/DataArtifactDAO.java | 2 +- .../autopsy/mainui/datamodel/MainDAO.java | 84 +++++++++---- .../autopsy/mainui/datamodel/TreeCount.java | 53 ++++++++ .../mainui/datamodel/TreeResultsDTO.java | 14 +-- .../autopsy/mainui/datamodel/ViewsDAO.java | 6 +- 
.../datamodel/events/DAOAggregateEvent.java | 14 +-- .../datamodel/events/DAOEventBatcher.java | 30 ++++- .../mainui/datamodel/events/TreeEvent.java | 76 ++++++++++++ .../datamodel/events/TreeEventTimer.java | 115 ++++++++++++++++++ .../nodes/AnalysisResultTypeFactory.java | 82 ++++++------- .../mainui/nodes/DataArtifactTypeFactory.java | 68 +++++------ .../mainui/nodes/TreeChildFactory.java | 50 ++++---- .../autopsy/mainui/nodes/TreeNode.java | 8 ++ .../mainui/nodes/ViewsTypeFactory.java | 70 +++++------ 17 files changed, 503 insertions(+), 200 deletions(-) create mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeCount.java create mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEvent.java create mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEventTimer.java diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java index a47dcbdf28..67321393c9 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java @@ -439,7 +439,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C private void initListeners() { UserPreferences.addChangeListener(this.pageSizeListener); Case.addEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, this.weakCaseEventListener); - this.mainDAO.addPropertyChangeListener(this.weakDAOListener); + this.mainDAO.getResultEventsManager().addPropertyChangeListener(this.weakDAOListener); IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS, this.weakDAOListener); } @@ -449,7 +449,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C private void closeListeners() { UserPreferences.removeChangeListener(this.pageSizeListener); Case.removeEventTypeSubscriber(EnumSet.of(Case.Events.CURRENT_CASE), this.weakCaseEventListener); - 
this.mainDAO.removePropertyChangeListener(this.weakDAOListener); + this.mainDAO.getResultEventsManager().removePropertyChangeListener(this.weakDAOListener); } /** diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java index d140dabec3..de72890777 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java @@ -34,19 +34,20 @@ abstract class AbstractDAO { abstract void clearCaches(); /** - * Handles an autopsy event (i.e. ingest, case, etc.). This method is responsible - * for clearing internal caches that are effected by the event and returning - * one or more DAOEvents that should be broadcasted to the views. + * Handles an autopsy event (i.e. ingest, case, etc.). This method is + * responsible for clearing internal caches that are effected by the event + * and returning one or more DAOEvents that should be broadcasted to the + * views. * * This method is responsible for minimizing the number of DAOEvents that - * are returned. For example, if there are 100 Autopsy events for the - * same type of data artifact in the same data source, then only a single - * DataArtifact event needs to be returned. + * are returned. For example, if there are 100 Autopsy events for the same + * type of data artifact in the same data source, then only a single + * DataArtifact event needs to be returned. * - * @param evt The Autopsy events that recently came in from Ingest/Case. + * @param evt The Autopsy events that recently came in from Ingest/Case. * - * @return The list of DAOEvents that should be broadcasted to the views or - * an empty list if the Autopsy events are irrelevant to this DAO. + * @return The list of DAOEvents that should be broadcasted to the views or + * an empty list if the Autopsy events are irrelevant to this DAO. 
*/ abstract List handleAutopsyEvent(Collection evt); diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java index 52269c762f..8e412ae3af 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java @@ -353,7 +353,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { new AnalysisResultSearchParam(entry.getKey(), dataSourceId), entry.getKey().getTypeID(), entry.getKey().getDisplayName(), - entry.getValue()); + TreeCount.getDeterminate(entry.getValue())); }) .sorted(Comparator.comparing(countRow -> countRow.getDisplayName())) .collect(Collectors.toList()); @@ -452,7 +452,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { new AnalysisResultSetSearchParam(type, dataSourceId, entry.getKey()), entry.getKey() == null ? 0 : entry.getKey(), entry.getKey() == null ? nullSetName : entry.getKey(), - entry.getValue()); + TreeCount.getDeterminate(entry.getValue())); }) .collect(Collectors.toList()); @@ -595,7 +595,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { new KeywordSearchTermParams(setName, searchTerm, searchType, hasChildren, dataSourceId), searchTermModified, searchTermModified, - count + TreeCount.getDeterminate(count) )); } } catch (SQLException ex) { @@ -685,7 +685,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { new KeywordMatchParams(setName, regexStr, keyword, searchType, dataSourceId), keyword, keyword == null ? 
"" : keyword, - count)); + TreeCount.getDeterminate(count))); } } catch (SQLException ex) { logger.log(Level.WARNING, "An error occurred while fetching results from result set.", ex); diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index b46c710113..c89d1cc4b8 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -152,7 +152,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { new DataArtifactSearchParam(entry.getKey(), dataSourceId), entry.getKey().getTypeID(), entry.getKey().getDisplayName(), - entry.getValue()); + TreeCount.getDeterminate(entry.getValue())); }) .sorted(Comparator.comparing(countRow -> countRow.getDisplayName())) .collect(Collectors.toList()); diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java index 55d7fa1a22..69a4ad5e86 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java @@ -27,17 +27,19 @@ import java.beans.PropertyChangeListener; import java.beans.PropertyChangeSupport; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.EnumSet; import java.util.List; import java.util.Set; import java.util.prefs.PreferenceChangeListener; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.apache.commons.collections.CollectionUtils; import org.python.google.common.collect.ImmutableSet; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.core.UserPreferences; import org.sleuthkit.autopsy.ingest.IngestManager; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEventTimer; /** * Main entry point for 
DAO for providing data to populate the data results @@ -58,7 +60,9 @@ public class MainDAO extends AbstractDAO { Case.Events.OS_ACCT_INSTANCES_ADDED.toString() ); - private static final long MILLIS_BATCH = 5000; + private static final long MILLIS_BATCH = 5 * 1000; + private static final long TREE_TIMEOUT_MILLIS = 2 * 60 * 1000; + private static final long TREE_CHECK_RESOLUTION_MILLIS = 10 * 1000; private static MainDAO instance = null; @@ -78,7 +82,7 @@ public class MainDAO extends AbstractDAO { if (evt.getPropertyName().equals(Case.Events.CURRENT_CASE.toString())) { this.clearCaches(); } else if (QUEUED_CASE_EVENTS.contains(evt.getPropertyName())) { - queueAutopsyEvent(evt); + enqueueAutopsyEvent(evt); } else { // handle case events immediately handleAutopsyEvent(Arrays.asList(evt)); @@ -96,12 +100,20 @@ public class MainDAO extends AbstractDAO { * The ingest module event listener. */ private final PropertyChangeListener ingestModuleEventListener = (evt) -> { - queueAutopsyEvent(evt); + enqueueAutopsyEvent(evt); }; - private final PropertyChangeSupport support = new PropertyChangeSupport(this); + private final PropertyChangeManager resultEventsManager = new PropertyChangeManager(); + private final PropertyChangeManager treeEventsManager = new PropertyChangeManager(); - private final DAOEventBatcher eventBatcher = new DAOEventBatcher<>((evts) -> this.getDAOEventsAndFire(evts), MILLIS_BATCH); + private final DAOEventBatcher eventBatcher = new DAOEventBatcher<>( + (evts) -> resultEventsManager.firePropertyChange("DATA_CHANGE", null, new DAOAggregateEvent(evts)), MILLIS_BATCH); + + private final TreeEventTimer treeEventTimer = new TreeEventTimer<>( + (evts, determinate) -> fireTreeEvents(evts, determinate), + TREE_TIMEOUT_MILLIS, + TREE_CHECK_RESOLUTION_MILLIS + ); private final DataArtifactDAO dataArtifactDAO = DataArtifactDAO.getInstance(); private final AnalysisResultDAO analysisResultDAO = AnalysisResultDAO.getInstance(); @@ -144,7 +156,7 @@ public class MainDAO 
extends AbstractDAO { public OsAccountsDAO getOsAccountsDAO() { return osAccountsDAO; } - + public CommAccountsDAO getCommAccountsDAO() { return commAccountsDAO; } @@ -154,6 +166,7 @@ public class MainDAO extends AbstractDAO { allDAOs.forEach((subDAO) -> subDAO.clearCaches()); } + // TODO breakup @Override List handleAutopsyEvent(Collection evt) { return allDAOs.stream() @@ -162,25 +175,31 @@ public class MainDAO extends AbstractDAO { .collect(Collectors.toList()); } + private void fireTreeEvents(Collection evts, boolean determinate) { + List treeEvts = evts.stream() + .map((daoEvt) -> new TreeEvent(daoEvt, determinate)) + .collect(Collectors.toList()); + + treeEventsManager.firePropertyChange("TREE_CHANGE", null, new DAOAggregateEvent(treeEvts)); + } + /** - * Determines DAO events from autopsy events and fires DAO aggregate event - * if there are any created DAO events. + * Handle incoming autopsy event by queueing in batch and firing events. * - * @param evts The autopsy events. + * @param autopsyEvent The autopsy event. 
*/ - private void getDAOEventsAndFire(Collection evts) { - List daoEvents = handleAutopsyEvent(evts); - if (!CollectionUtils.isEmpty(daoEvents)) { - support.firePropertyChange(new PropertyChangeEvent(this, "DATA_CHANGE", null, new DAOAggregateEvent(daoEvents))); - } + private void enqueueAutopsyEvent(PropertyChangeEvent autopsyEvent) { + List daoEvents = handleAutopsyEvent(Collections.singletonList(autopsyEvent)); + this.eventBatcher.enqueueAllEvents(daoEvents); + this.treeEventTimer.enqueueAll(daoEvents); } - public void addPropertyChangeListener(PropertyChangeListener listener) { - support.addPropertyChangeListener(listener); + public PropertyChangeManager getResultEventsManager() { + return this.resultEventsManager; } - public void removePropertyChangeListener(PropertyChangeListener listener) { - support.removePropertyChangeListener(listener); + public PropertyChangeManager getTreeEventsManager() { + return treeEventsManager; } /** @@ -207,11 +226,28 @@ public class MainDAO extends AbstractDAO { } /** - * Handle incoming autopsy event by queueing in batch and firing events. - * - * @param autopsyEvent The autopsy event. + * A wrapper around property change support that exposes + * addPropertyChangeListener and removePropertyChangeListener so that weak + * listeners can automatically unregister. 
*/ - private void queueAutopsyEvent(PropertyChangeEvent autopsyEvent) { - this.eventBatcher.queueEvent(autopsyEvent); + public static class PropertyChangeManager { + + private final PropertyChangeSupport support = new PropertyChangeSupport(this); + + public void addPropertyChangeListener(PropertyChangeListener listener) { + support.addPropertyChangeListener(listener); + } + + public void removePropertyChangeListener(PropertyChangeListener listener) { + support.removePropertyChangeListener(listener); + } + + PropertyChangeListener[] getPropertyChangeListeners() { + return support.getPropertyChangeListeners(); + } + + void firePropertyChange(String propertyName, Object oldValue, Object newValue) { + support.firePropertyChange(propertyName, oldValue, newValue); + } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeCount.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeCount.java new file mode 100644 index 0000000000..4a88de5979 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeCount.java @@ -0,0 +1,53 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel; + +/** + * Captures the count to be displayed in the UI. 
+ */ +public class TreeCount { + public enum Type { + DETERMINATE, + INDETERMINATE, + NOT_SHOWN + } + + private final Type type; + private final long count; + + public static final TreeCount INDETERMINATE = new TreeCount(Type.INDETERMINATE, -1); + public static final TreeCount NOT_SHOWN = new TreeCount(Type.NOT_SHOWN, -1); + + public static TreeCount getDeterminate(long count) { + return new TreeCount(Type.DETERMINATE, count); + } + + private TreeCount(Type type, long count) { + this.type = type; + this.count = count; + } + + public Type getType() { + return type; + } + + public long getCount() { + return count; + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java index 2d42b4464e..eb7bcd0c8f 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java @@ -51,7 +51,7 @@ public class TreeResultsDTO { private final String displayName; private final String typeId; - private final Long count; + private final TreeCount count; private final T typeData; private final Object id; @@ -65,10 +65,9 @@ public class TreeResultsDTO { * @param id The id of this row. Can be any object that * implements equals and hashCode. * @param displayName The display name of this row. - * @param count The count of results for this row or null if not - * applicable. + * @param count The count of results for this row. */ - public TreeItemDTO(String typeId, T typeData, Object id, String displayName, Long count) { + public TreeItemDTO(String typeId, T typeData, Object id, String displayName, TreeCount count) { this.typeId = typeId; this.id = id; this.displayName = displayName; @@ -84,9 +83,9 @@ public class TreeResultsDTO { } /** - * @return The count of results for this row or null if not applicable. + * @return The count of results for this row. 
*/ - public Long getCount() { + public TreeCount getCount() { return count; } @@ -114,7 +113,6 @@ public class TreeResultsDTO { public String getTypeId() { return typeId; } - - + } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java index f9fe65ad16..95d998962e 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java @@ -348,7 +348,7 @@ public class ViewsDAO extends AbstractDAO { new FileTypeExtensionsSearchParams(entry.getKey(), dataSourceId), entry.getKey(), entry.getKey().getDisplayName(), - entry.getValue()); + TreeCount.getDeterminate(entry.getValue())); }) .sorted((a, b) -> a.getDisplayName().compareToIgnoreCase(b.getDisplayName())) .collect(Collectors.toList()); @@ -382,7 +382,7 @@ public class ViewsDAO extends AbstractDAO { new FileTypeSizeSearchParams(entry.getKey(), dataSourceId), entry.getKey(), entry.getKey().getDisplayName(), - entry.getValue()); + TreeCount.getDeterminate(entry.getValue())); }) .sorted((a, b) -> a.getDisplayName().compareToIgnoreCase(b.getDisplayName())) .collect(Collectors.toList()); @@ -467,7 +467,7 @@ public class ViewsDAO extends AbstractDAO { new FileTypeMimeSearchParams(entry.getKey(), dataSourceId), name, name, - entry.getValue()); + TreeCount.getDeterminate(entry.getValue())); }) .sorted((a, b) -> stringCompare(a.getTypeData().getMimeType(), b.getTypeData().getMimeType())) .collect(Collectors.toList()); diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOAggregateEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOAggregateEvent.java index 329a3087bc..adb5d905a9 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOAggregateEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOAggregateEvent.java @@ -18,29 +18,29 @@ */ package org.sleuthkit.autopsy.mainui.datamodel.events; -import 
java.util.List; -import org.apache.commons.collections4.list.UnmodifiableList; +import java.util.Collection; +import java.util.Collections; /** * A single event containing an aggregate of all affected data. */ public class DAOAggregateEvent { - private final List objects; + private final Collection objects; /** * Main constructor. * * @param objects The list of events in this aggregate event. */ - public DAOAggregateEvent(List objects) { - this.objects = UnmodifiableList.unmodifiableList(objects); + public DAOAggregateEvent(Collection objects) { + this.objects = Collections.unmodifiableCollection(objects); } /** - * @return The list of events in this aggregate event. + * @return The events in this aggregate event. */ - public List getEvents() { + public Collection getEvents() { return objects; } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventBatcher.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventBatcher.java index 69e472762e..005f5707f5 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventBatcher.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventBatcher.java @@ -19,9 +19,9 @@ package org.sleuthkit.autopsy.mainui.datamodel.events; import com.google.common.util.concurrent.ThreadFactoryBuilder; -import java.util.ArrayList; import java.util.Collection; -import java.util.List; +import java.util.HashSet; +import java.util.Set; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.TimeUnit; @@ -49,7 +49,7 @@ public class DAOEventBatcher { = new ScheduledThreadPoolExecutor(1, new ThreadFactoryBuilder().setNameFormat(DAOEventBatcher.class.getName()).build()); - private List aggregateEvents = new ArrayList<>(); + private Set aggregateEvents = new HashSet<>(); private Object eventListLock = new Object(); private boolean isRunning = false; @@ -63,11 +63,21 @@ public class DAOEventBatcher { /** * Queues an event to be fired as a part of a 
time-windowed batch. + * * @param event The event. */ public void queueEvent(T event) { synchronized (this.eventListLock) { this.aggregateEvents.add(event); + verifyRunning(); + } + } + + /** + * Starts up throttled event runner if not currently running. + */ + private void verifyRunning() { + synchronized (this.eventListLock) { if (!this.isRunning) { refreshExecutor.schedule(() -> fireEvents(), this.batchMillis, TimeUnit.MILLISECONDS); this.isRunning = true; @@ -75,6 +85,18 @@ public class DAOEventBatcher { } } + /** + * Queues an event to be fired as a part of a time-windowed batch. + * + * @param events The events. + */ + public void enqueueAllEvents(Collection events) { + synchronized (this.eventListLock) { + this.aggregateEvents.addAll(events); + verifyRunning(); + } + } + /** * Fires all events and clears batch. */ @@ -82,7 +104,7 @@ public class DAOEventBatcher { Collection evtsToFire; synchronized (this.eventListLock) { evtsToFire = this.aggregateEvents; - this.aggregateEvents = new ArrayList<>(); + this.aggregateEvents = new HashSet<>(); this.isRunning = false; } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEvent.java new file mode 100644 index 0000000000..f36cfe9ed7 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEvent.java @@ -0,0 +1,76 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +import java.util.Objects; + + +public class TreeEvent implements DAOEvent { + private final DAOEvent daoEvent; + private final boolean determinate; + + public TreeEvent(DAOEvent daoEvent, boolean determinate) { + this.daoEvent = daoEvent; + this.determinate = determinate; + } + + public DAOEvent getDaoEvent() { + return daoEvent; + } + + public boolean isDeterminate() { + return determinate; + } + + @Override + public int hashCode() { + int hash = 3; + hash = 41 * hash + Objects.hashCode(this.daoEvent); + hash = 41 * hash + (this.determinate ? 1 : 0); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final TreeEvent other = (TreeEvent) obj; + if (this.determinate != other.determinate) { + return false; + } + if (!Objects.equals(this.daoEvent, other.daoEvent)) { + return false; + } + return true; + } + + @Override + public Type getType() { + return Type.TREE; + } + + +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEventTimer.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEventTimer.java new file mode 100644 index 0000000000..41c7ac5ad7 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEventTimer.java @@ -0,0 +1,115 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +import com.google.common.util.concurrent.ThreadFactoryBuilder; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.ScheduledThreadPoolExecutor; +import java.util.concurrent.TimeUnit; + +public class TreeEventTimer { + + public interface TreeEventHandler { + + void handleEvents(Collection events, boolean determinate); + } + + private final Map eventTimeouts = new HashMap<>(); + private final Object timeoutLock = new Object(); + private ScheduledFuture cancellableFuture; + + private final TreeEventHandler eventsHandler; + private final long timeoutMillis; + private final long watchResolutionMillis; + + private final ScheduledThreadPoolExecutor timeoutExecutor + = new ScheduledThreadPoolExecutor(1, + new ThreadFactoryBuilder().setNameFormat(DAOEventBatcher.class.getName()).build()); + + public TreeEventTimer(TreeEventHandler eventsHandler, long timeoutMillis, long checkResolutionMillis) { + this.eventsHandler = eventsHandler; + this.timeoutMillis = timeoutMillis; + this.watchResolutionMillis = checkResolutionMillis; + } + + private long getCurTime() { + return System.currentTimeMillis(); + } + + private long getTimeoutTime() { + return getCurTime() + timeoutMillis; + } + + public void enqueueAll(List events) { + List updateToIndeterminate = new ArrayList<>(); + + synchronized (this.timeoutLock) { + boolean needsWatch = 
this.eventTimeouts.isEmpty(); + for (T event : events) { + // GVDTODO do we need to update all? + this.eventTimeouts.compute(event, (k, v) -> { + if (v == null) { + updateToIndeterminate.add(event); + } + return getTimeoutTime(); + }); + } + + if (needsWatch) { + this.cancellableFuture = this.timeoutExecutor.scheduleAtFixedRate( + () -> handleEventTimeouts(), + this.watchResolutionMillis, + this.watchResolutionMillis, + TimeUnit.MILLISECONDS); + } + } + + if (!updateToIndeterminate.isEmpty()) { + this.eventsHandler.handleEvents(updateToIndeterminate, false); + } + } + + private void handleEventTimeouts() { + long curTime = getCurTime(); + List toUpdate = new ArrayList<>(); + synchronized (this.timeoutLock) { + if (Thread.interrupted()) { + return; + } + + this.eventTimeouts.forEach((k, v) -> { + if (v >= curTime) { + toUpdate.add(k); + this.eventTimeouts.remove(k); + } + }); + + if (this.eventTimeouts.isEmpty()) { + this.cancellableFuture.cancel(true); + } + } + + this.eventsHandler.handleEvents(toUpdate, true); + } +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultTypeFactory.java index 60459768d1..d1f945148c 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultTypeFactory.java @@ -93,35 +93,35 @@ public class AnalysisResultTypeFactory extends TreeChildFactory createNewNode(TreeResultsDTO.TreeItemDTO rowData) { @@ -328,10 +328,10 @@ public class AnalysisResultTypeFactory extends TreeChildFactory extends ChildFactory.Detachable implements Refresher { +public abstract class TreeChildFactory extends ChildFactory.Detachable { private static final Logger logger = Logger.getLogger(TreeChildFactory.class.getName()); - private static final Set INGEST_JOB_EVENTS_OF_INTEREST - = EnumSet.of(IngestManager.IngestJobEvent.COMPLETED, IngestManager.IngestJobEvent.CANCELLED); - - 
private final RefreshThrottler refreshThrottler = new RefreshThrottler(this); - private final PropertyChangeListener pcl = (PropertyChangeEvent evt) -> { - if (evt.getNewValue() instanceof DAOAggregateEvent) { + if (evt.getNewValue() instanceof DAOEvent) { DAOAggregateEvent aggEvt = (DAOAggregateEvent) evt.getNewValue(); for (DAOEvent daoEvt : aggEvt.getEvents()) { - if (isChildInvalidating(daoEvt)) { - updateData(); - break; + if (daoEvt instanceof TreeEvent) { + TreeEvent treeEvt = (TreeEvent) daoEvt; + if (isChildInvalidating(treeEvt.getDaoEvent())) { + try { + if (treeEvt.isDeterminate()) { + updateData(); + } else { + showIndeterminate(treeEvt); + } + } catch (ExecutionException ex) { + logger.log(Level.WARNING, "An error occurred while updating the data for this factory of type: " + this.getClass().getName(), ex); + } + break; + } } } } }; - private final PropertyChangeListener weakPcl = WeakListeners.propertyChange(pcl, null); + private final PropertyChangeListener weakPcl = WeakListeners.propertyChange(pcl, MainDAO.getInstance().getTreeEventsManager()); private final Map> typeNodeMap = new MapMaker().weakValues().makeMap(); private TreeResultsDTO curResults = null; @@ -122,11 +124,6 @@ public abstract class TreeChildFactory extends ChildFactory.Detachable extends ChildFactory.Detachable extends AbstractNode implements SelectionRespo protected TreeItemDTO getItemData() { return itemData; } + + /** + * Sets this node to an indeterminate state. + */ + void setIndeterminate() { + String baseName = this.itemData == null ? 
this.itemData.getDisplayName() : ""; + this.setDisplayName(baseName + "..."); + } /** * Sets the display name of the node to include the display name and count diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java index a186c0add0..65a3e873e5 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java @@ -99,22 +99,22 @@ public class ViewsTypeFactory { return MainDAO.getInstance().getViewsDAO().getFileSizeCounts(this.dataSourceId); } - @Override - public boolean isRefreshRequired(PropertyChangeEvent evt) { - AbstractFile evtFile = getFileInDataSourceFromEvt(evt, this.dataSourceId); - if (evtFile == null) { - return false; - } - - long size = evtFile.getSize(); - for (FileSizeFilter filter : FileSizeFilter.values()) { - if (size >= filter.getMinBound() || size < filter.getMaxBound()) { - return true; - } - } - - return false; - } +// @Override +// public boolean isRefreshRequired(PropertyChangeEvent evt) { +// AbstractFile evtFile = getFileInDataSourceFromEvt(evt, this.dataSourceId); +// if (evtFile == null) { +// return false; +// } +// +// long size = evtFile.getSize(); +// for (FileSizeFilter filter : FileSizeFilter.values()) { +// if (size >= filter.getMinBound() || size < filter.getMaxBound()) { +// return true; +// } +// } +// +// return false; +// } /** * Shows a file size tree node. 
@@ -164,10 +164,10 @@ public class ViewsTypeFactory { return MainDAO.getInstance().getViewsDAO().getFileMimeCounts(null, this.dataSourceId); } - @Override - public boolean isRefreshRequired(PropertyChangeEvent evt) { - return getFileInDataSourceFromEvt(evt, this.dataSourceId) != null; - } +// @Override +// public boolean isRefreshRequired(PropertyChangeEvent evt) { +// return getFileInDataSourceFromEvt(evt, this.dataSourceId) != null; +// } static class FileMimePrefixNode extends TreeNode { @@ -218,15 +218,15 @@ public class ViewsTypeFactory { return MainDAO.getInstance().getViewsDAO().getFileMimeCounts(this.mimeTypePrefix, this.dataSourceId); } - @Override - public boolean isRefreshRequired(PropertyChangeEvent evt) { - AbstractFile file = getFileInDataSourceFromEvt(evt, dataSourceId); - if (file == null || file.getMIMEType() == null) { - return false; - } - - return file.getMIMEType().toLowerCase().startsWith(this.mimeTypePrefix.toLowerCase()); - } +// @Override +// public boolean isRefreshRequired(PropertyChangeEvent evt) { +// AbstractFile file = getFileInDataSourceFromEvt(evt, dataSourceId); +// if (file == null || file.getMIMEType() == null) { +// return false; +// } +// +// return file.getMIMEType().toLowerCase().startsWith(this.mimeTypePrefix.toLowerCase()); +// } /** * Displays an individual suffix node in the tree (i.e. 'aac' underneath @@ -302,12 +302,12 @@ public class ViewsTypeFactory { return MainDAO.getInstance().getViewsDAO().getFileExtCounts(this.childFilters, this.dataSourceId); } - @Override - public boolean isRefreshRequired(PropertyChangeEvent evt) { - AbstractFile file = getFileInDataSourceFromEvt(evt, this.dataSourceId); - return file != null && file.getNameExtension() != null && - this.childFilters.stream().anyMatch((filter) -> filter.getFilter().contains("." 
+ file.getNameExtension().toLowerCase())); - } +// @Override +// public boolean isRefreshRequired(PropertyChangeEvent evt) { +// AbstractFile file = getFileInDataSourceFromEvt(evt, this.dataSourceId); +// return file != null && file.getNameExtension() != null && +// this.childFilters.stream().anyMatch((filter) -> filter.getFilter().contains("." + file.getNameExtension().toLowerCase())); +// } /** * Represents a file extension tree node that may or may not have child From 812c1e4808551af176d32f13ea80c2043d10ba09 Mon Sep 17 00:00:00 2001 From: apriestman Date: Tue, 23 Nov 2021 10:01:03 -0500 Subject: [PATCH 073/142] Fix counts. Cleanup --- .../datamodel/DataSourceFilesNode.java | 70 ------------------- .../datamodel/DataSourceGroupingNode.java | 5 -- .../autopsy/datamodel/DataSourcesNode.java | 1 - .../sleuthkit/autopsy/datamodel/HostNode.java | 3 - .../datamodel/FileSystemColumnUtils.java | 19 +++-- .../mainui/datamodel/FileSystemDAO.java | 30 ++++++-- .../mainui/datamodel/TreeResultsDTO.java | 3 +- .../autopsy/mainui/nodes/NodeIconUtil.java | 8 ++- 8 files changed, 46 insertions(+), 93 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/DataSourceFilesNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/DataSourceFilesNode.java index af5db5b700..872269cfbd 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/DataSourceFilesNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/DataSourceFilesNode.java @@ -70,7 +70,6 @@ public class DataSourceFilesNode extends DisplayableItemNode { } public DataSourceFilesNode(long dsObjId) { - //super(Children.create(new DataSourcesNodeChildren(dsObjId), true), Lookups.singleton(NAME)); super(Children.create(new FileSystemFactory.DataSourceFactory(dsObjId), true), Lookups.singleton(NAME)); displayName = (dsObjId > 0) ? 
NbBundle.getMessage(DataSourceFilesNode.class, "DataSourcesNode.group_by_datasource.name") : NAME; init(); @@ -87,75 +86,6 @@ public class DataSourceFilesNode extends DisplayableItemNode { return getClass().getName(); } - /* - * Custom Keys implementation that listens for new data sources being added. - */ - public static class DataSourcesNodeChildren extends AbstractContentChildren { - - private static final Logger logger = Logger.getLogger(DataSourcesNodeChildren.class.getName()); - private final long datasourceObjId; - - List currentKeys; - - public DataSourcesNodeChildren() { - this(0); - } - - public DataSourcesNodeChildren(long dsObjId) { - super("ds_" + Long.toString(dsObjId)); - this.currentKeys = new ArrayList<>(); - this.datasourceObjId = dsObjId; - } - - private final PropertyChangeListener pcl = new PropertyChangeListener() { - @Override - public void propertyChange(PropertyChangeEvent evt) { - String eventType = evt.getPropertyName(); - if (eventType.equals(Case.Events.DATA_SOURCE_ADDED.toString())) { - refresh(true); - } - } - }; - - @Override - protected void onAdd() { - Case.addEventTypeSubscriber(EnumSet.of(Case.Events.DATA_SOURCE_ADDED), pcl); - } - - @Override - protected void onRemove() { - Case.removeEventTypeSubscriber(EnumSet.of(Case.Events.DATA_SOURCE_ADDED), pcl); - currentKeys.clear(); - } - - @Override - protected List makeKeys() { - try { - if (datasourceObjId == 0) { - currentKeys = Case.getCurrentCaseThrows().getDataSources(); - } else { - Content content = Case.getCurrentCaseThrows().getSleuthkitCase().getDataSource(datasourceObjId); - currentKeys = new ArrayList<>(Arrays.asList(content)); - } - - Collections.sort(currentKeys, new Comparator() { - @Override - public int compare(Content content1, Content content2) { - String content1Name = content1.getName().toLowerCase(); - String content2Name = content2.getName().toLowerCase(); - return content1Name.compareTo(content2Name); - } - - }); - - } catch (TskCoreException | 
NoCurrentCaseException | TskDataException ex) { - logger.log(Level.SEVERE, "Error getting data sources: {0}", ex.getMessage()); // NON-NLS - } - - return currentKeys; - } - } - @Override public boolean isLeafTypeNode() { return false; diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/DataSourceGroupingNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/DataSourceGroupingNode.java index 05e761d3fc..16280fee76 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/DataSourceGroupingNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/DataSourceGroupingNode.java @@ -22,12 +22,10 @@ import java.util.Arrays; import java.util.Collections; import java.util.Optional; import java.util.logging.Level; -import org.openide.nodes.Children; import org.openide.util.lookup.Lookups; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.mainui.nodes.FileSystemFactory; import org.sleuthkit.datamodel.DataSource; import org.sleuthkit.datamodel.Image; import org.sleuthkit.datamodel.LocalFilesDataSource; @@ -50,9 +48,6 @@ class DataSourceGroupingNode extends DisplayableItemNode { super(Optional.ofNullable(createDSGroupingNodeChildren(dataSource)) .orElse(new RootContentChildren(Arrays.asList(Collections.EMPTY_LIST))), Lookups.singleton(dataSource)); - // TODO other part - //super(Children.create(new FileSystemFactory(dataSource.getId()), true), - // Lookups.singleton(dataSource)); if (dataSource instanceof Image) { Image image = (Image) dataSource; diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/DataSourcesNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/DataSourcesNode.java index 5c0c9720b4..e735eca3d9 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/DataSourcesNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/DataSourcesNode.java @@ -36,7 +36,6 @@ import org.openide.util.lookup.Lookups; import 
org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.mainui.nodes.FileSystemFactory; import org.sleuthkit.datamodel.TskCoreException; /** diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/HostNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/HostNode.java index 8b71494b76..d26224f817 100644 --- a/Core/src/org/sleuthkit/autopsy/datamodel/HostNode.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/HostNode.java @@ -29,7 +29,6 @@ import java.util.logging.Level; import java.util.stream.Collectors; import javax.swing.Action; import org.openide.nodes.ChildFactory; - import org.openide.nodes.Children; import org.openide.nodes.Node; import org.openide.nodes.Sheet; @@ -222,9 +221,7 @@ public class HostNode extends DisplayableItemNode implements SelectionResponder{ * @param hosts The HostDataSources key. */ HostNode(HostDataSources hosts) { - //super(Children.create(new FileSystemFactory(dsObjId), true), Lookups.singleton(NAME)); this(Children.create(new FileSystemFactory(hosts.getHost()), true), hosts.getHost()); - //this(Children.create(new HostGroupingChildren(HOST_DATA_SOURCES, hosts.getHost()), true), hosts.getHost()); } /** diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemColumnUtils.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemColumnUtils.java index 2dfd75254c..2a9a17dbb9 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemColumnUtils.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemColumnUtils.java @@ -23,7 +23,6 @@ import java.sql.SQLException; import java.util.Arrays; import java.util.ArrayList; import java.util.Collections; -import java.util.Comparator; import java.util.Iterator; import java.util.List; import java.util.logging.Level; @@ -508,7 +507,9 @@ public class FileSystemColumnUtils { /** * Get the content that should be displayed in the table 
based on the given object. * Algorithm: - * - If content is already displayable, return it + * - If content is known and known files are being hidden, return an empty list + * - If content is a slack file and slack files are being hidden, return an empty list + * - If content is a displayable type, return it * - If content is a volume system, return its displayable children * - If content is a file system, return the displayable children of the root folder * - If content is the root folder, return the displayable children of the root folder @@ -538,12 +539,16 @@ public class FileSystemColumnUtils { } /** - * Get the displayable content children in common between the table and tree views. - * Advances past content types we do not display (volume systems, file systems, root folder). + * Get the content that should be displayed in the table based on the given object. + * Algorithm: + * - If content is a displayable type, return it + * - If content is a volume system, return its displayable children + * - If content is a file system, return the displayable children of the root folder + * - If content is the root folder, return the displayable children of the root folder + * + * @param content The base content. * - * @param content The content to get the children of. - * - * @return List of displayable content children. + * @return List of content to add to the table/tree. 
* * @throws TskCoreException */ diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java index ed85b6565d..af293dc8a1 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java @@ -22,10 +22,8 @@ import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import java.beans.PropertyChangeEvent; import java.util.ArrayList; -import java.util.Collections; import java.util.Comparator; import java.util.List; -import java.util.Objects; import java.util.Optional; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; @@ -294,6 +292,15 @@ public class FileSystemDAO { return searchParamsCache.get(searchParams, () -> fetchHostsForTable(searchParams)); } + /** + * Get all data sources belonging to a given host. + * + * @param host The host. + * + * @return Results containing all data sources for the given host. + * + * @throws ExecutionException + */ public TreeResultsDTO getDataSourcesForHost(Host host) throws ExecutionException { try { List> treeItemRows = new ArrayList<>(); @@ -312,6 +319,15 @@ public class FileSystemDAO { } } + /** + * Create results for a single given data source ID (not its children). + * + * @param dataSourceObjId The data source object ID. + * + * @return Results containing just this data source. + * + * @throws ExecutionException + */ public TreeResultsDTO getSingleDataSource(long dataSourceObjId) throws ExecutionException { try { List> treeItemRows = new ArrayList<>(); @@ -349,7 +365,7 @@ public class FileSystemDAO { Long countForNode = null; if ((child instanceof AbstractFile) && ! 
(child instanceof LocalFilesDataSource)) { - countForNode = new Long(child.getChildrenCount()); // TODO does not account for hidden children + countForNode = getContentForTable(new FileSystemContentSearchParam(child.getId()), 0, null, false).getTotalResultsCount(); } treeItemRows.add(new TreeResultsDTO.TreeItemDTO<>( child.getClass().getSimpleName(), @@ -366,8 +382,14 @@ public class FileSystemDAO { } } + /** + * Get display name for the given content. + * + * @param content The content. + * + * @return Display name for the content. + */ private String getNameForContent(Content content) { - // Currently the only special case is for volumes if (content instanceof Volume) { return FileSystemColumnUtils.getVolumeDisplayName((Volume)content); } else if (content instanceof AbstractFile) { diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java index 923ed0390a..48c44d2275 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java @@ -19,7 +19,6 @@ package org.sleuthkit.autopsy.mainui.datamodel; import java.util.List; -import java.util.Optional; /** * A list of items to display in the tree. @@ -76,7 +75,7 @@ public class TreeResultsDTO { this.count = count; this.typeData = typeData; } - + /** * @return The display name of this row. 
*/ diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/NodeIconUtil.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/NodeIconUtil.java index fc7931da93..059db7b50c 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/NodeIconUtil.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/NodeIconUtil.java @@ -5,7 +5,13 @@ */ package org.sleuthkit.autopsy.mainui.nodes; -import org.sleuthkit.datamodel.*; // TODO +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.Image; +import org.sleuthkit.datamodel.LocalFilesDataSource; +import org.sleuthkit.datamodel.Pool; +import org.sleuthkit.datamodel.TskData; +import org.sleuthkit.datamodel.Volume; /** * Consolidates node paths shared between the result view table and the tree. From ae437011c50cd220c9819a84036d7f9b26299a2a Mon Sep 17 00:00:00 2001 From: apriestman Date: Tue, 23 Nov 2021 10:08:42 -0500 Subject: [PATCH 074/142] Restore bundles --- .../corecomponents/Bundle.properties-MERGED | 30 ++++------- .../datamodel/Bundle.properties-MERGED | 50 ++----------------- 2 files changed, 14 insertions(+), 66 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties-MERGED index 0636340b0b..07a42f5e19 100755 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties-MERGED @@ -35,6 +35,9 @@ DataArtifactContentViewer.failedToGetSourcePath.message=Failed to get source fil DataContentViewerHex.copyingFile=Copying file to open in HxD... DataContentViewerHex.launchError=Unable to launch HxD Editor. Please specify the HxD install location in Tools -> Options -> External Viewer DataContentViewerHex_loading_text=Loading hex from file... 
+# {0} - pageNumber +# {1} - pageCount +DataResultPanel_pageIdxOfCount={0} of {1} DataResultViewerTable.commentRender.name=C DataResultViewerTable.commentRender.toolTip=C(omments) indicates whether the item has a comment DataResultViewerTable.commentRenderer.crAndTagComment.toolTip=Comments exist both in Central Repository and on associated tag(s) @@ -45,12 +48,6 @@ DataResultViewerTable.countRender.name=O DataResultViewerTable.countRender.toolTip=O(ccurrences) indicates the number of data sources containing the item in the Central Repository DataResultViewerTable.exportCSVButtonActionPerformed.empty=No data to export DataResultViewerTable.firstColLbl=Name -DataResultViewerTable.goToPageTextField.err=Invalid page number -# {0} - totalPages -DataResultViewerTable.goToPageTextField.msgDlg=Please enter a valid page number between 1 and {0} -# {0} - currentPage -# {1} - totalPages -DataResultViewerTable.pageNumbers.curOfTotal={0} of {1} DataResultViewerTable.scoreRender.name=S DataResultViewerTable.scoreRender.toolTip=S(core) indicates whether the item is interesting or notable DataResultViewerTable.title=Table @@ -98,16 +95,9 @@ DataArtifactContentViewer.pageLabel.text=Result: AdvancedConfigurationDialog.applyButton.text=OK DataContentViewerHex.goToPageTextField.text= DataContentViewerHex.goToPageLabel.text=Go to Page: -DataResultViewerThumbnail.pageLabel.text=Page: -DataResultViewerThumbnail.pagesLabel.text=Pages: -DataResultViewerThumbnail.pagePrevButton.text= -DataResultViewerThumbnail.pageNextButton.text= DataResultViewerThumbnail.imagesLabel.text=Images: DataResultViewerThumbnail.imagesRangeLabel.text=- -DataResultViewerThumbnail.pageNumLabel.text=- DataResultViewerThumbnail.filePathLabel.text=\ \ \ -DataResultViewerThumbnail.goToPageLabel.text=Go to Page: -DataResultViewerThumbnail.goToPageField.text= AdvancedConfigurationDialog.cancelButton.text=Cancel DataArtifactContentViewer.waitText=Retrieving and preparing data, please wait... 
DataArtifactContentViewer.errorText=Error retrieving result @@ -232,14 +222,7 @@ ExternalViewerGlobalSettingsPanel.deleteRuleButton.text_1=Delete Rule ExternalViewerGlobalSettingsPanel.externalViewerTitleLabel.text_1=Set aplication viewer to use for files with specific mime types/extensions: ExternalViewerGlobalSettingsPanel.jTable1.columnModel.title1_1=Application ExternalViewerGlobalSettingsPanel.jTable1.columnModel.title0_1=Mime type/Extension -DataResultViewerTable.gotoPageTextField.text= DataResultViewerTable.gotoPageLabel.AccessibleContext.accessibleName= -DataResultViewerTable.gotoPageLabel.text=Go to Page: -DataResultViewerTable.pageNextButton.text= -DataResultViewerTable.pagePrevButton.text= -DataResultViewerTable.pagesLabel.text=Pages: -DataResultViewerTable.pageNumLabel.text= -DataResultViewerTable.pageLabel.text=Page: DataResultViewerTable.exportCSVButton.text=Save Table as CSV MultiUserSettingsPanel.tbSolr4Hostname.toolTipText=Solr 4 Hostname or IP Address MultiUserSettingsPanel.tbSolr4Port.toolTipText=Solr 4 Port Number @@ -321,3 +304,10 @@ AutopsyOptionsPanel.heapDumpFileField.text= AutopsyOptionsPanel.heapDumpBrowseButton.text=Browse AutopsyOptionsPanel.heapFileLabel.text=Custom Heap Dump Location: AutopsyOptionsPanel.heapFieldValidationLabel.text= +DataResultPanel.gotoPageTextField.text= +DataResultPanel.gotoPageLabel.text=Go to Page: +DataResultPanel.pageLabel.text=Page: +DataResultPanel.pagesLabel.text=Pages: +DataResultPanel.pageNumLabel.text= +DataResultPanel.pageNextButton.text= +DataResultPanel.pagePrevButton.text= diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/datamodel/Bundle.properties-MERGED index d753a6e329..40a7fc2c77 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/datamodel/Bundle.properties-MERGED @@ -102,18 +102,12 @@ BlackboardArtifactNode_getViewSrcContentAction_type_DataArtifact=Data Artifact 
BlackboardArtifactNode_getViewSrcContentAction_type_File=File BlackboardArtifactNode_getViewSrcContentAction_type_OSAccount=OS Account BlackboardArtifactNode_getViewSrcContentAction_type_unknown=Item -BlackboardArtifactTagNode.createSheet.userName.text=User Name -BlackboardArtifactTagNode.viewSourceArtifact.text=View Source Result Category.five=CAT-5: Non-pertinent Category.four=CAT-4: Exemplar/Comparison (Internal Use Only) Category.one=CAT-1: Child Exploitation (Illegal) Category.three=CAT-3: CGI/Animation (Child Exploitive) Category.two=CAT-2: Child Exploitation (Non-Illegal/Age Difficult) Category.zero=CAT-0: Uncategorized -ContentTagNode.createSheet.artifactMD5.displayName=MD5 Hash -ContentTagNode.createSheet.artifactMD5.name=MD5 Hash -ContentTagNode.createSheet.origFileName=Original Name -ContentTagNode.createSheet.userName.text=User Name DataArtifacts_name=Data Artifacts DataSourcesHostsNode_name=Data Sources DeletedContent.allDelFilter.text=All @@ -130,19 +124,12 @@ FileNode.getActions.openInExtViewer.text=Open in External Viewer Ctrl+E FileNode.getActions.searchFilesSameMD5.text=Search for files with the same MD5 hash FileNode.getActions.viewFileInDir.text=View File in Directory FileNode.getActions.viewInNewWin.text=View Item in New Window -FileTypeExtensionFilters.tskDatabaseFilter.text=Databases FileTypes.bgCounting.placeholder=\ (counting...) 
FileTypes.createSheet.name.desc=no description FileTypes.createSheet.name.displayName=Name FileTypes.createSheet.name.name=Name FileTypes.name.text=File Types FileTypesByMimeType.name.text=By MIME Type -FileTypesByMimeTypeNode.createSheet.mediaSubtype.desc=no description -FileTypesByMimeTypeNode.createSheet.mediaSubtype.displayName=Subtype -FileTypesByMimeTypeNode.createSheet.mediaSubtype.name=Subtype -FileTypesByMimeTypeNode.createSheet.mediaType.desc=no description -FileTypesByMimeTypeNode.createSheet.mediaType.displayName=Type -FileTypesByMimeTypeNode.createSheet.mediaType.name=Type GetSCOTask.occurrences.defaultDescription=No correlation properties found GetSCOTask.occurrences.multipleProperties=Multiple different correlation properties exist for this result HostGroupingNode_unknownHostNode_title=Unknown Host @@ -313,10 +300,10 @@ ImageNode.getActions.viewInNewWin.text=View in New Window ImageNode.createSheet.name.name=Name ImageNode.createSheet.name.displayName=Name ImageNode.createSheet.name.desc=no description -Installer.exception.tskVerStringNull.msg=Sleuth Kit JNI test call returned without error, but version string was null\! -Installer.exception.taskVerStringBang.msg=Sleuth Kit JNI test call returned without error, but version string was ""\! -Installer.tskLibErr.msg=Problem with Sleuth Kit JNI. Test call failed\!\n\nDetails: {0} -Installer.tskLibErr.err=Fatal Error\! +Installer.exception.tskVerStringNull.msg=Sleuth Kit JNI test call returned without error, but version string was null! +Installer.exception.taskVerStringBang.msg=Sleuth Kit JNI test call returned without error, but version string was ""! +Installer.tskLibErr.msg=Problem with Sleuth Kit JNI. Test call failed!\n\nDetails: {0} +Installer.tskLibErr.err=Fatal Error! 
InterestingHits.interestingItems.text=INTERESTING ITEMS InterestingHits.displayName.text=Interesting Items InterestingHits.createSheet.name.name=Name @@ -347,33 +334,6 @@ OpenReportAction.actionPerformed.NoOpenInEditorSupportMessage=This platform (ope OpenReportAction.actionPerformed.MissingReportFileMessage=The report file no longer exists. OpenReportAction.actionPerformed.ReportFileOpenPermissionDeniedMessage=Permission to open the report file was denied. OsAccount_listNode_name=OS Accounts -OsAccounts.createSheet.comment.displayName=C -OsAccounts.createSheet.comment.name=C -# {0} - occurrenceCount -OsAccounts.createSheet.count.description=There were {0} datasource(s) found with occurrences of the OS Account correlation value -OsAccounts.createSheet.count.displayName=O -OsAccounts.createSheet.count.hashLookupNotRun.description=Hash lookup had not been run on this file when the column was populated -OsAccounts.createSheet.count.name=O -OsAccounts.createSheet.score.displayName=S -OsAccounts.createSheet.score.name=S -OsAccounts_accountHostNameProperty_desc=OS Account Host Name -OsAccounts_accountHostNameProperty_displayName=Host -OsAccounts_accountHostNameProperty_name=HostName -OsAccounts_accountNameProperty_desc=Os Account name -OsAccounts_accountNameProperty_displayName=Name -OsAccounts_accountNameProperty_name=Name -OsAccounts_accountRealmNameProperty_desc=OS Account Realm Name -OsAccounts_accountRealmNameProperty_displayName=Realm Name -OsAccounts_accountRealmNameProperty_name=RealmName -OsAccounts_accountScopeNameProperty_desc=OS Account Scope Name -OsAccounts_accountScopeNameProperty_displayName=Scope -OsAccounts_accountScopeNameProperty_name=ScopeName -OsAccounts_createdTimeProperty_desc=OS Account Creation Time -OsAccounts_createdTimeProperty_displayName=Creation Time -OsAccounts_createdTimeProperty_name=creationTime -OsAccounts_loginNameProperty_desc=OS Account login name -OsAccounts_loginNameProperty_displayName=Login Name 
-OsAccounts_loginNameProperty_name=loginName PersonGroupingNode_actions_delete=Delete Person PersonGroupingNode_actions_rename=Rename Person... PersonGroupingNode_createSheet_nameProperty=Name @@ -421,8 +381,6 @@ TagNameNode.bbArtTagTypeNodeKey.text=Result Tags TagNameNode.bookmark.text=Bookmark TagNameNode.createSheet.name.name=Name TagNameNode.createSheet.name.displayName=Name -TagNode.propertySheet.origName=Original Name -TagNode.propertySheet.origNameDisplayName=Original Name TagsNode.displayName.text=Tags TagsNode.createSheet.name.name=Name TagsNode.createSheet.name.displayName=Name From eba331f82f2554d8431d756f11a2b2b086401e33 Mon Sep 17 00:00:00 2001 From: rcordovano Date: Tue, 23 Nov 2021 12:02:10 -0500 Subject: [PATCH 075/142] 7895 CR data artifact ingest module --- .../experimental/configuration/Bundle.properties-MERGED | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/configuration/Bundle.properties-MERGED b/Experimental/src/org/sleuthkit/autopsy/experimental/configuration/Bundle.properties-MERGED index 86fd175181..854c57bed1 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/configuration/Bundle.properties-MERGED +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/configuration/Bundle.properties-MERGED @@ -65,15 +65,19 @@ DayOfTheWeekRenderer_Tuesday_Label=Tuesday DayOfTheWeekRenderer_Wednesday_Label=Wednesday GeneralOptionsPanelController.moduleErr.msg=A module caused an error listening to GeneralOptionsPanelController updates. See log to determine which module. Some data could be incomplete. 
GeneralOptionsPanelController.moduleErr=Module Error +# {0} - errorMessage MultiUserTestTool.criticalError=Critical error running data source processor on test data source: {0} MultiUserTestTool.errorStartingIngestJob=Ingest manager error while starting ingest job +# {0} - cancellationReason MultiUserTestTool.ingestCancelled=Ingest cancelled due to {0} MultiUserTestTool.ingestSettingsError=Failed to analyze data source due to ingest settings errors MultiUserTestTool.noContent=Test data source failed to produce content +# {0} - serviceName MultiUserTestTool.serviceDown=Multi User service is down: {0} MultiUserTestTool.startupError=Failed to analyze data source due to ingest job startup error MultiUserTestTool.unableAddFileAsDataSource=Unable to add test file as data source to case MultiUserTestTool.unableCreatFile=Unable to create a file in case output directory +# {0} - serviceName MultiUserTestTool.unableToCheckService=Unable to check Multi User service state: {0} MultiUserTestTool.unableToCreateCase=Unable to create case MultiUserTestTool.unableToInitializeDatabase=Case database was not successfully initialized From ac82d6d4a18725cf435480dfdf622f9cecaa2e3d Mon Sep 17 00:00:00 2001 From: apriestman Date: Tue, 23 Nov 2021 14:47:54 -0500 Subject: [PATCH 076/142] Don't hide the .. directory --- .../autopsy/mainui/datamodel/FileSystemColumnUtils.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemColumnUtils.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemColumnUtils.java index 82648f4db2..476645984c 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemColumnUtils.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemColumnUtils.java @@ -177,6 +177,12 @@ class FileSystemColumnUtils { */ static boolean isDisplayable(Content content) { if (content instanceof AbstractFile) { + // .. 
directories near the top of the directory structure can + // pass the isRoot() check, so first check if the name is empty + // (real root directories will have a blank name field) + if (!content.getName().isEmpty()) { + return true; + } return ! ((AbstractFile)content).isRoot(); } return (getContentType(content) != ContentType.UNSUPPORTED); From 57013e5a46b7bbaf92016e15f438994945dc45fd Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Tue, 23 Nov 2021 16:11:05 -0500 Subject: [PATCH 077/142] updates --- .../autopsy/mainui/datamodel/AbstractDAO.java | 27 ++- .../mainui/datamodel/AnalysisResultDAO.java | 88 ++++++--- .../datamodel/BlackboardArtifactDAO.java | 2 +- .../mainui/datamodel/CommAccountsDAO.java | 2 +- .../mainui/datamodel/DataArtifactDAO.java | 69 ++++--- .../mainui/datamodel/FileSystemDAO.java | 2 +- .../autopsy/mainui/datamodel/MainDAO.java | 176 ++++++++++++------ .../mainui/datamodel/OsAccountsDAO.java | 2 +- .../autopsy/mainui/datamodel/TagsDAO.java | 2 +- .../mainui/datamodel/TreeResultsDTO.java | 14 +- .../autopsy/mainui/datamodel/ViewsDAO.java | 8 +- .../datamodel/events/AnalysisResultEvent.java | 6 +- .../events/AnalysisResultSetEvent.java | 6 +- .../events/BlackboardArtifactEvent.java | 22 ++- .../datamodel/events/DAOAggregateEvent.java | 6 +- .../datamodel/events/DAOEventBatcher.java | 30 ++- .../datamodel/events/DataArtifactEvent.java | 6 +- .../datamodel/events/KeywordHitEvent.java | 7 +- .../mainui/datamodel/events/TreeEvent.java | 27 +-- ...entTimer.java => TreeEventTimedCache.java} | 69 +++---- .../mainui/nodes/DataArtifactTypeFactory.java | 46 ++--- .../mainui/nodes/TreeChildFactory.java | 97 +++++++--- 22 files changed, 437 insertions(+), 277 deletions(-) rename Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/{TreeEventTimer.java => TreeEventTimedCache.java} (50%) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java index 
de72890777..801f0db3b6 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java @@ -21,7 +21,7 @@ package org.sleuthkit.autopsy.mainui.datamodel; import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; import java.beans.PropertyChangeEvent; import java.util.Collection; -import java.util.List; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; /** * Internal methods that DAOs implement. @@ -39,16 +39,27 @@ abstract class AbstractDAO { * and returning one or more DAOEvents that should be broadcasted to the * views. * - * This method is responsible for minimizing the number of DAOEvents that - * are returned. For example, if there are 100 Autopsy events for the same - * type of data artifact in the same data source, then only a single - * DataArtifact event needs to be returned. - * - * @param evt The Autopsy events that recently came in from Ingest/Case. + * @param evt The Autopsy event that recently came in from Ingest/Case. * * @return The list of DAOEvents that should be broadcasted to the views or * an empty list if the Autopsy events are irrelevant to this DAO. */ - abstract List handleAutopsyEvent(Collection evt); + abstract Collection processEvent(PropertyChangeEvent evt); + /** + * Any events that are delayed or batched are flushed and returned. + * + * @return The flushed events that were delayed and batched. + */ + abstract Collection flushEvents(); + + /** + * Returns any categories that require a tree refresh. For instance, if web + * cache and web bookmarks haven't updated recently, and are currently set + * to an indeterminate amount (i.e. "..."), then broadcast an event forcing + * tree to update to a determinate count. + * + * @return The categories that require a tree refresh. 
+ */ + abstract Collection shouldRefreshTree(); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java index 8e412ae3af..336db9a8e6 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java @@ -44,11 +44,14 @@ import java.util.stream.Stream; import org.apache.commons.lang3.tuple.Pair; import org.openide.util.NbBundle; import org.openide.util.NbBundle.Messages; +import org.python.google.common.collect.ImmutableSet; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventUtils; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEventTimedCache; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.AnalysisResult; @@ -136,12 +139,22 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { public static Set getIgnoredTreeTypes() { return BlackboardArtifactDAO.getIgnoredTreeTypes(); } + + @SuppressWarnings("deprecation") + private static final Set STANDARD_SET_TYPES = ImmutableSet.of( + BlackboardArtifact.Type.TSK_INTERESTING_ITEM.getTypeID(), + BlackboardArtifact.Type.TSK_INTERESTING_ARTIFACT_HIT.getTypeID(), + BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT.getTypeID(), + BlackboardArtifact.Type.TSK_HASHSET_HIT.getTypeID() + ); // TODO We can probably combine all the caches at some point private final Cache, AnalysisResultTableSearchResultsDTO> analysisResultCache = CacheBuilder.newBuilder().maximumSize(1000).build(); private final Cache, 
AnalysisResultTableSearchResultsDTO> setHitCache = CacheBuilder.newBuilder().maximumSize(1000).build(); private final Cache, AnalysisResultTableSearchResultsDTO> keywordHitCache = CacheBuilder.newBuilder().maximumSize(1000).build(); + private final TreeEventTimedCache treeCache = new TreeEventTimedCache<>(); + private AnalysisResultTableSearchResultsDTO fetchAnalysisResultsForTable(SearchParams cacheKey) throws NoCurrentCaseException, TskCoreException { SleuthkitCase skCase = getCase(); @@ -280,7 +293,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } AnalysisResultEvent analysisResultEvt = (AnalysisResultEvent) eventData; - return key.getArtifactType().getTypeID() == analysisResultEvt.getArtifactTypeId() + return key.getArtifactType().getTypeID() == analysisResultEvt.getArtifactType().getTypeID() && (key.getDataSourceId() == null || key.getDataSourceId() == analysisResultEvt.getDataSourceId()); } @@ -353,7 +366,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { new AnalysisResultSearchParam(entry.getKey(), dataSourceId), entry.getKey().getTypeID(), entry.getKey().getDisplayName(), - TreeCount.getDeterminate(entry.getValue())); + entry.getValue()); }) .sorted(Comparator.comparing(countRow -> countRow.getDisplayName())) .collect(Collectors.toList()); @@ -452,7 +465,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { new AnalysisResultSetSearchParam(type, dataSourceId, entry.getKey()), entry.getKey() == null ? 0 : entry.getKey(), entry.getKey() == null ? 
nullSetName : entry.getKey(), - TreeCount.getDeterminate(entry.getValue())); + entry.getValue()); }) .collect(Collectors.toList()); @@ -595,7 +608,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { new KeywordSearchTermParams(setName, searchTerm, searchType, hasChildren, dataSourceId), searchTermModified, searchTermModified, - TreeCount.getDeterminate(count) + count )); } } catch (SQLException ex) { @@ -685,7 +698,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { new KeywordMatchParams(setName, regexStr, keyword, searchType, dataSourceId), keyword, keyword == null ? "" : keyword, - TreeCount.getDeterminate(count))); + count)); } } catch (SQLException ex) { logger.log(Level.WARNING, "An error occurred while fetching results from result set.", ex); @@ -703,42 +716,40 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { this.analysisResultCache.invalidateAll(); this.keywordHitCache.invalidateAll(); this.setHitCache.invalidateAll(); + this.flushEvents(); } @Override - List handleAutopsyEvent(Collection evts) { + Collection processEvent(PropertyChangeEvent evt) { // get a grouping of artifacts mapping the artifact type id to data source id. 
Map> analysisResultMap = new HashMap<>(); Map, Set> setMap = new HashMap<>(); Map> keywordHitsMap = new HashMap<>(); - for (PropertyChangeEvent evt : evts) { - ModuleDataEvent dataEvt = DAOEventUtils.getModuleDataFromEvt(evt); - if (dataEvt != null) { - for (BlackboardArtifact art : dataEvt.getArtifacts()) { - try { - if (art.getArtifactTypeID() == BlackboardArtifact.Type.TSK_KEYWORD_HIT.getTypeID()) { - // GVDTODO handle keyword hits - } else if (art.getArtifactTypeID() == BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT.getTypeID() - || art.getArtifactTypeID() == BlackboardArtifact.Type.TSK_INTERESTING_ARTIFACT_HIT.getTypeID() - || art.getArtifactTypeID() == BlackboardArtifact.Type.TSK_HASHSET_HIT.getTypeID()) { - BlackboardAttribute setAttr = art.getAttribute(BlackboardAttribute.Type.TSK_SET_NAME); - String setName = setAttr == null ? null : setAttr.getValueString(); - setMap.computeIfAbsent(Pair.of(art.getArtifactTypeID(), setName), (k) -> new HashSet<>()) - .add(art.getDataSourceObjectID()); + ModuleDataEvent dataEvt = DAOEventUtils.getModuleDataFromEvt(evt); + if (dataEvt != null) { + for (BlackboardArtifact art : dataEvt.getArtifacts()) { + try { + if (art.getArtifactTypeID() == BlackboardArtifact.Type.TSK_KEYWORD_HIT.getTypeID()) { + // GVDTODO handle keyword hits + } else if (STANDARD_SET_TYPES.contains(art.getArtifactTypeID())) { + BlackboardAttribute setAttr = art.getAttribute(BlackboardAttribute.Type.TSK_SET_NAME); + String setName = setAttr == null ? 
null : setAttr.getValueString(); + setMap.computeIfAbsent(Pair.of(art.getArtifactTypeID(), setName), (k) -> new HashSet<>()) + .add(art.getDataSourceObjectID()); - } else if (BlackboardArtifact.Category.ANALYSIS_RESULT.equals(art.getType().getCategory())) { - analysisResultMap.computeIfAbsent(art.getArtifactTypeID(), (k) -> new HashSet<>()) - .add(art.getDataSourceObjectID()); - } - } catch (TskCoreException ex) { - logger.log(Level.WARNING, "Unable to fetch necessary information for artifact id: " + art.getId(), ex); + } else if (BlackboardArtifact.Category.ANALYSIS_RESULT.equals(art.getType().getCategory())) { + analysisResultMap.computeIfAbsent(art.getArtifactTypeID(), (k) -> new HashSet<>()) + .add(art.getDataSourceObjectID()); } + } catch (TskCoreException ex) { + logger.log(Level.WARNING, "Unable to fetch necessary information for artifact id: " + art.getId(), ex); } } } + // don't continue if no relevant items found if (analysisResultMap.isEmpty() && setMap.isEmpty() && keywordHitsMap.isEmpty()) { return Collections.emptyList(); @@ -746,7 +757,11 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { clearRelevantCacheEntries(analysisResultMap, setMap); - return getDAOEvents(analysisResultMap, setMap); + List daoEvents = getDAOEvents(analysisResultMap, setMap); + Collection treeEvents = this.treeCache.enqueueAll(daoEvents); + return Stream.of(daoEvents, treeEvents) + .flatMap(lst -> lst.stream()) + .collect(Collectors.toList()); } /** @@ -761,13 +776,12 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { * * @return The list of dao events. 
*/ - private List getDAOEvents(Map> analysisResultMap, Map, Set> resultsWithSetMap) { + private List getDAOEvents(Map> analysisResultMap, Map, Set> resultsWithSetMap) { // invalidate cache entries that are affected by events - // GVDTODO handle concurrency issues that may arise - Stream analysisResultEvts = analysisResultMap.entrySet().stream() + Stream analysisResultEvts = analysisResultMap.entrySet().stream() .flatMap(entry -> entry.getValue().stream().map(dsId -> new AnalysisResultEvent(entry.getKey(), dsId))); - Stream analysisResultSetEvts = resultsWithSetMap.entrySet().stream() + Stream analysisResultSetEvts = resultsWithSetMap.entrySet().stream() .flatMap(entry -> entry.getValue().stream().map(dsId -> new AnalysisResultSetEvent(entry.getKey().getRight(), entry.getKey().getLeft(), dsId))); // GVDTODO handle keyword hits @@ -809,6 +823,18 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { // private final Cache, AnalysisResultTableSearchResultsDTO> keywordHitCache = CacheBuilder.newBuilder().maximumSize(1000).build(); } + @Override + Collection flushEvents() { + return this.treeCache.flushEvents(); + } + + @Override + Collection shouldRefreshTree() { + return this.treeCache.getEventTimeouts().stream() + .map(daoEvt -> new TreeEvent(daoEvt, true)) + .collect(Collectors.toList()); + } + /** * Handles fetching and paging of analysis results. 
*/ diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BlackboardArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BlackboardArtifactDAO.java index c5ea115a5e..3c06da7ddc 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BlackboardArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/BlackboardArtifactDAO.java @@ -11,7 +11,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.concurrent.ExecutionException; import java.util.logging.Level; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -149,6 +148,7 @@ abstract class BlackboardArtifactDAO extends AbstractDAO { protected static Set getIgnoredTreeTypes() { return IGNORED_TYPES; } + TableData createTableData(BlackboardArtifact.Type artType, List arts) throws TskCoreException, NoCurrentCaseException { Map> artifactAttributes = new HashMap<>(); diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java index db9b9bb2ee..b2d9966e8b 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java @@ -154,7 +154,7 @@ public class CommAccountsDAO extends AbstractDAO { } @Override - List handleAutopsyEvent(Collection evts) { + List processEvent(Collection evts) { // maps account type to the data sources affected Map> commAccountsAffected = new HashMap<>(); try { diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index c89d1cc4b8..fb95f9bc75 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -29,8 +29,6 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import 
java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; @@ -40,7 +38,12 @@ import java.util.stream.Collectors; import org.sleuthkit.autopsy.coreutils.Logger; import java.util.concurrent.ExecutionException; import java.util.logging.Level; +import java.util.stream.Stream; +import org.apache.commons.lang3.tuple.Pair; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; +import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEventTimedCache; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; @@ -65,7 +68,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { return instance; } - + /** * @return The set of types that are not shown in the tree. */ @@ -73,6 +76,8 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { return BlackboardArtifactDAO.getIgnoredTreeTypes(); } + + private final TreeEventTimedCache treeCache = new TreeEventTimedCache<>(); private final Cache, DataArtifactTableSearchResultsDTO> dataArtifactCache = CacheBuilder.newBuilder().maximumSize(1000).build(); private DataArtifactTableSearchResultsDTO fetchDataArtifactsForTable(SearchParams cacheKey) throws NoCurrentCaseException, TskCoreException { @@ -120,7 +125,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { return false; } else { DataArtifactEvent dataArtEvt = (DataArtifactEvent) eventData; - return key.getArtifactType().getTypeID() == dataArtEvt.getArtifactTypeId() + return key.getArtifactType().getTypeID() == dataArtEvt.getArtifactType().getTypeID() && (key.getDataSourceId() == null || (key.getDataSourceId() == dataArtEvt.getDataSourceId())); } } @@ -152,7 +157,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { new 
DataArtifactSearchParam(entry.getKey(), dataSourceId), entry.getKey().getTypeID(), entry.getKey().getDisplayName(), - TreeCount.getDeterminate(entry.getValue())); + entry.getValue()); }) .sorted(Comparator.comparing(countRow -> countRow.getDisplayName())) .collect(Collectors.toList()); @@ -168,27 +173,30 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { @Override void clearCaches() { this.dataArtifactCache.invalidateAll(); + this.flushEvents(); } @Override - List handleAutopsyEvent(Collection evts) { + List processEvent(PropertyChangeEvent evt) { // get a grouping of artifacts mapping the artifact type id to data source id. - Map> artifactTypeDataSourceMap = new HashMap<>(); - evts.stream() - .map(evt -> DAOEventUtils.getModuleDataFromEvt(evt)) - .filter(dataEvt -> dataEvt != null) - .flatMap(dataEvt -> dataEvt.getArtifacts().stream()) - .forEach((art) -> { + ModuleDataEvent dataEvt = DAOEventUtils.getModuleDataFromEvt(evt); + if (evt == null) { + return Collections.emptyList(); + } + + Map> artifactTypeDataSourceMap = dataEvt.getArtifacts().stream() + .map((art) -> { try { if (BlackboardArtifact.Category.DATA_ARTIFACT.equals(art.getType().getCategory())) { - artifactTypeDataSourceMap - .computeIfAbsent(art.getArtifactTypeID(), (k) -> new HashSet<>()) - .add(art.getDataSourceObjectID()); + return Pair.of(art.getType(), art.getDataSourceObjectID()); } } catch (TskCoreException ex) { logger.log(Level.WARNING, "Unable to fetch artifact category for artifact with id: " + art.getId(), ex); } - }); + return null; + }) + .filter(pr -> pr != null) + .collect(Collectors.groupingBy(pr -> pr.getKey(), Collectors.mapping(pr -> pr.getValue(), Collectors.toSet()))); // don't do anything else if no relevant events if (artifactTypeDataSourceMap.isEmpty()) { @@ -208,15 +216,34 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { }); // gather dao events based on artifacts - List toRet = new ArrayList<>(); - for (Entry> entry : 
artifactTypeDataSourceMap.entrySet()) { - int artTypeId = entry.getKey(); + List dataArtifactEvents = new ArrayList<>(); + for (Entry> entry : artifactTypeDataSourceMap.entrySet()) { + BlackboardArtifact.Type artType = entry.getKey(); for (Long dsObjId : entry.getValue()) { - toRet.add(new DataArtifactEvent(artTypeId, dsObjId)); + DataArtifactEvent newEvt = new DataArtifactEvent(artType, dsObjId); + dataArtifactEvents.add(newEvt); } } + + List newTreeEvents = this.treeCache.enqueueAll(dataArtifactEvents).stream() + .map(daoEvt -> new TreeEvent(new DataArtifactSearchParam(daoEvt.getArtifactType(), daoEvt.getDataSourceId()), false)) + .collect(Collectors.toList()); + + return Stream.of(dataArtifactEvents, newTreeEvents) + .flatMap((lst) -> lst.stream()) + .collect(Collectors.toList()); + } - return toRet; + @Override + Collection flushEvents() { + return this.treeCache.flushEvents(); + } + + @Override + Collection shouldRefreshTree() { + return this.treeCache.getEventTimeouts().stream() + .map(dataEvt -> new TreeEvent(dataEvt, true)) + .collect(Collectors.toList()); } /* diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java index 91c22ec149..f35b46b48d 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java @@ -355,7 +355,7 @@ public class FileSystemDAO extends AbstractDAO { } @Override - List handleAutopsyEvent(Collection evts) { + List processEvent(Collection evts) { Set affectedPersons = new HashSet<>(); Set affectedHosts = new HashSet<>(); Set affectedParentContent = new HashSet<>(); diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java index 69a4ad5e86..c53d9dd9ca 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java +++ 
b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java @@ -22,24 +22,27 @@ import org.sleuthkit.autopsy.mainui.datamodel.events.DAOAggregateEvent; import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventBatcher; import com.google.common.collect.ImmutableList; +import com.google.common.util.concurrent.ThreadFactoryBuilder; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.beans.PropertyChangeSupport; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.EnumSet; import java.util.List; +import java.util.Map; import java.util.Set; +import java.util.concurrent.ScheduledThreadPoolExecutor; +import java.util.concurrent.TimeUnit; import java.util.prefs.PreferenceChangeListener; import java.util.stream.Collectors; import java.util.stream.Stream; +import org.apache.commons.collections4.CollectionUtils; import org.python.google.common.collect.ImmutableSet; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.core.UserPreferences; +import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.IngestManager; import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; -import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEventTimer; /** * Main entry point for DAO for providing data to populate the data results @@ -47,6 +50,13 @@ import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEventTimer; */ public class MainDAO extends AbstractDAO { + private static final Logger logger = Logger.getLogger(MainDAO.class.getName()); + + private static final Set INGEST_JOB_EVENTS = EnumSet.of( + IngestManager.IngestJobEvent.COMPLETED, + IngestManager.IngestJobEvent.CANCELLED + ); + private static final Set INGEST_MODULE_EVENTS = EnumSet.of( IngestManager.IngestModuleEvent.CONTENT_CHANGED, IngestManager.IngestModuleEvent.DATA_ADDED, @@ -60,16 +70,16 @@ public class MainDAO 
extends AbstractDAO { Case.Events.OS_ACCT_INSTANCES_ADDED.toString() ); - private static final long MILLIS_BATCH = 5 * 1000; - private static final long TREE_TIMEOUT_MILLIS = 2 * 60 * 1000; - private static final long TREE_CHECK_RESOLUTION_MILLIS = 10 * 1000; + private static final long WATCH_RESOLUTION_MILLIS = 30 * 1000; + + private static final long RESULT_BATCH_MILLIS = 5 * 1000; private static MainDAO instance = null; public synchronized static MainDAO getInstance() { if (instance == null) { instance = new MainDAO(); - instance.register(); + instance.init(); } return instance; @@ -82,10 +92,10 @@ public class MainDAO extends AbstractDAO { if (evt.getPropertyName().equals(Case.Events.CURRENT_CASE.toString())) { this.clearCaches(); } else if (QUEUED_CASE_EVENTS.contains(evt.getPropertyName())) { - enqueueAutopsyEvent(evt); + handleEvent(evt, false); } else { // handle case events immediately - handleAutopsyEvent(Arrays.asList(evt)); + handleEvent(evt, true); } }; @@ -100,20 +110,25 @@ public class MainDAO extends AbstractDAO { * The ingest module event listener. */ private final PropertyChangeListener ingestModuleEventListener = (evt) -> { - enqueueAutopsyEvent(evt); + handleEvent(evt, false); }; + /** + * The ingest job event listener. 
+ */ + private final PropertyChangeListener ingestJobEventListener = (evt) -> { + handleEventFlush(); + }; + + private final ScheduledThreadPoolExecutor timeoutExecutor + = new ScheduledThreadPoolExecutor(1, + new ThreadFactoryBuilder().setNameFormat(MainDAO.class.getName()).build()); + private final PropertyChangeManager resultEventsManager = new PropertyChangeManager(); private final PropertyChangeManager treeEventsManager = new PropertyChangeManager(); private final DAOEventBatcher eventBatcher = new DAOEventBatcher<>( - (evts) -> resultEventsManager.firePropertyChange("DATA_CHANGE", null, new DAOAggregateEvent(evts)), MILLIS_BATCH); - - private final TreeEventTimer treeEventTimer = new TreeEventTimer<>( - (evts, determinate) -> fireTreeEvents(evts, determinate), - TREE_TIMEOUT_MILLIS, - TREE_CHECK_RESOLUTION_MILLIS - ); + (evts) -> fireResultEvts(evts), RESULT_BATCH_MILLIS); private final DataArtifactDAO dataArtifactDAO = DataArtifactDAO.getInstance(); private final AnalysisResultDAO analysisResultDAO = AnalysisResultDAO.getInstance(); @@ -161,39 +176,6 @@ public class MainDAO extends AbstractDAO { return commAccountsDAO; } - @Override - void clearCaches() { - allDAOs.forEach((subDAO) -> subDAO.clearCaches()); - } - - // TODO breakup - @Override - List handleAutopsyEvent(Collection evt) { - return allDAOs.stream() - .map(subDAO -> subDAO.handleAutopsyEvent(evt)) - .flatMap(evts -> evts == null ? Stream.empty() : evts.stream()) - .collect(Collectors.toList()); - } - - private void fireTreeEvents(Collection evts, boolean determinate) { - List treeEvts = evts.stream() - .map((daoEvt) -> new TreeEvent(daoEvt, determinate)) - .collect(Collectors.toList()); - - treeEventsManager.firePropertyChange("TREE_CHANGE", null, new DAOAggregateEvent(treeEvts)); - } - - /** - * Handle incoming autopsy event by queueing in batch and firing events. - * - * @param autopsyEvent The autopsy event. 
- */ - private void enqueueAutopsyEvent(PropertyChangeEvent autopsyEvent) { - List daoEvents = handleAutopsyEvent(Collections.singletonList(autopsyEvent)); - this.eventBatcher.enqueueAllEvents(daoEvents); - this.treeEventTimer.enqueueAll(daoEvents); - } - public PropertyChangeManager getResultEventsManager() { return this.resultEventsManager; } @@ -202,13 +184,98 @@ public class MainDAO extends AbstractDAO { return treeEventsManager; } + @Override + void clearCaches() { + allDAOs.forEach((subDAO) -> subDAO.clearCaches()); + } + + @Override + List processEvent(PropertyChangeEvent evt) { + return allDAOs.stream() + .map(subDAO -> subDAO.processEvent(evt)) + .flatMap(evts -> evts == null ? Stream.empty() : evts.stream()) + .collect(Collectors.toList()); + } + + @Override + List shouldRefreshTree() { + return allDAOs.stream() + .map((subDAO) -> subDAO.shouldRefreshTree()) + .flatMap(evts -> evts == null ? Stream.empty() : evts.stream()) + .collect(Collectors.toList()); + } + + @Override + Collection flushEvents() { + Stream> daoStreamEvts = allDAOs.stream() + .map((subDAO) -> subDAO.flushEvents()); + + Collection batchFlushedEvts = eventBatcher.flushEvents(); + + return Stream.concat(daoStreamEvts, Stream.of(batchFlushedEvts)) + .flatMap(evts -> evts == null ? 
Stream.empty() : evts.stream()) + .collect(Collectors.toList()); + } + + private void handleEvent(PropertyChangeEvent evt, boolean immediateAction) { + Collection daoEvts = processEvent(evt); + + Map> daoEvtsByType = daoEvts.stream() + .collect(Collectors.groupingBy(e -> e.getType())); + + fireTreeEvts(daoEvtsByType.get(DAOEvent.Type.TREE)); + + List resultEvts = daoEvtsByType.get(DAOEvent.Type.RESULT); + if (immediateAction) { + fireResultEvts(resultEvts); + } else { + eventBatcher.enqueueAllEvents(resultEvts); + } + } + + private void handleEventFlush() { + Collection daoEvts = flushEvents(); + + Map> daoEvtsByType = daoEvts.stream() + .collect(Collectors.groupingBy(e -> e.getType())); + + fireTreeEvts(daoEvtsByType.get(DAOEvent.Type.TREE)); + + List resultEvts = daoEvtsByType.get(DAOEvent.Type.RESULT); + fireResultEvts(resultEvts); + } + + private void fireResultEvts(Collection resultEvts) { + if (CollectionUtils.isNotEmpty(resultEvts)) { + resultEventsManager.firePropertyChange("DATA_CHANGE", null, new DAOAggregateEvent(resultEvts)); + } + } + + private void fireTreeEvts(Collection treeEvts) { + if (CollectionUtils.isNotEmpty(treeEvts)) { + treeEventsManager.firePropertyChange("TREE_CHANGE", null, new DAOAggregateEvent(treeEvts)); + } + } + + private void handleTreeEventTimeouts() { + fireTreeEvts(this.shouldRefreshTree()); + } + /** - * Registers listeners with autopsy event publishers. + * Registers listeners with autopsy event publishers and starts internal + * threads. 
*/ - void register() { + void init() { IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS, ingestModuleEventListener); + IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS, ingestJobEventListener); Case.addPropertyChangeListener(caseEventListener); UserPreferences.addChangeListener(userPreferenceListener); + + this.timeoutExecutor.scheduleAtFixedRate( + () -> handleTreeEventTimeouts(), + WATCH_RESOLUTION_MILLIS, + WATCH_RESOLUTION_MILLIS, + TimeUnit.MILLISECONDS); } @Override @@ -221,14 +288,15 @@ public class MainDAO extends AbstractDAO { */ void unregister() { IngestManager.getInstance().removeIngestModuleEventListener(INGEST_MODULE_EVENTS, ingestModuleEventListener); + IngestManager.getInstance().removeIngestJobEventListener(INGEST_JOB_EVENTS, ingestJobEventListener); Case.removePropertyChangeListener(caseEventListener); UserPreferences.removeChangeListener(userPreferenceListener); } /** * A wrapper around property change support that exposes - * addPropertyChangeListener and removePropertyChangeListener so that weak - * listeners can automatically unregister. + * addPropertyChangeListener and removePropertyChangeListener so that + * netbeans weak listeners can automatically unregister. 
*/ public static class PropertyChangeManager { diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java index cf3dc3e811..4996bd4dc9 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java @@ -184,7 +184,7 @@ public class OsAccountsDAO extends AbstractDAO { } @Override - List handleAutopsyEvent(Collection evts) { + List processEvent(Collection evts) { List daoEvts = evts.stream().filter(evt -> OS_EVENTS.contains(evt.getPropertyName())) .map(evt -> new OsAccountEvent()) .limit(1) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java index 837c124c39..86457eefbf 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java @@ -313,7 +313,7 @@ public class TagsDAO extends AbstractDAO { } @Override - List handleAutopsyEvent(Collection evts) { + List processEvent(Collection evts) { Map, Set>> mapping = new HashMap<>(); for (PropertyChangeEvent evt : evts) { // tag type, tag name id, data source id (or null if unknown) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java index eb7bcd0c8f..2d42b4464e 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java @@ -51,7 +51,7 @@ public class TreeResultsDTO { private final String displayName; private final String typeId; - private final TreeCount count; + private final Long count; private final T typeData; private final Object id; @@ -65,9 +65,10 @@ public class TreeResultsDTO { * @param id The id of this row. Can be any object that * implements equals and hashCode. 
* @param displayName The display name of this row. - * @param count The count of results for this row. + * @param count The count of results for this row or null if not + * applicable. */ - public TreeItemDTO(String typeId, T typeData, Object id, String displayName, TreeCount count) { + public TreeItemDTO(String typeId, T typeData, Object id, String displayName, Long count) { this.typeId = typeId; this.id = id; this.displayName = displayName; @@ -83,9 +84,9 @@ public class TreeResultsDTO { } /** - * @return The count of results for this row. + * @return The count of results for this row or null if not applicable. */ - public TreeCount getCount() { + public Long getCount() { return count; } @@ -113,6 +114,7 @@ public class TreeResultsDTO { public String getTypeId() { return typeId; } - + + } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java index 95d998962e..86545e4e2f 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java @@ -348,7 +348,7 @@ public class ViewsDAO extends AbstractDAO { new FileTypeExtensionsSearchParams(entry.getKey(), dataSourceId), entry.getKey(), entry.getKey().getDisplayName(), - TreeCount.getDeterminate(entry.getValue())); + entry.getValue()); }) .sorted((a, b) -> a.getDisplayName().compareToIgnoreCase(b.getDisplayName())) .collect(Collectors.toList()); @@ -382,7 +382,7 @@ public class ViewsDAO extends AbstractDAO { new FileTypeSizeSearchParams(entry.getKey(), dataSourceId), entry.getKey(), entry.getKey().getDisplayName(), - TreeCount.getDeterminate(entry.getValue())); + entry.getValue()); }) .sorted((a, b) -> a.getDisplayName().compareToIgnoreCase(b.getDisplayName())) .collect(Collectors.toList()); @@ -467,7 +467,7 @@ public class ViewsDAO extends AbstractDAO { new FileTypeMimeSearchParams(entry.getKey(), dataSourceId), name, name, - 
TreeCount.getDeterminate(entry.getValue())); + entry.getValue()); }) .sorted((a, b) -> stringCompare(a.getTypeData().getMimeType(), b.getTypeData().getMimeType())) .collect(Collectors.toList()); @@ -653,7 +653,7 @@ public class ViewsDAO extends AbstractDAO { } @Override - List handleAutopsyEvent(Collection autopsyEvts) { + List processEvent(Collection autopsyEvts) { Map> fileExtensionDsMap = new HashMap<>(); Map>> mimeTypeDsMap = new HashMap<>(); Map> fileSizeDsMap = new HashMap<>(); diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AnalysisResultEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AnalysisResultEvent.java index 9599d2d041..f8555c402c 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AnalysisResultEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AnalysisResultEvent.java @@ -18,14 +18,16 @@ */ package org.sleuthkit.autopsy.mainui.datamodel.events; +import org.sleuthkit.datamodel.BlackboardArtifact; + /** * An event for an artifact added or changed of a particular type possibly for a * particular data source. 
*/ public class AnalysisResultEvent extends BlackboardArtifactEvent { - public AnalysisResultEvent(long artifactTypeId, long dataSourceId) { - super(artifactTypeId, dataSourceId); + public AnalysisResultEvent(BlackboardArtifact.Type artifactType, long dataSourceId) { + super(artifactType, dataSourceId); } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AnalysisResultSetEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AnalysisResultSetEvent.java index 22a6d7b87c..67cf87cd82 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AnalysisResultSetEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AnalysisResultSetEvent.java @@ -18,6 +18,8 @@ */ package org.sleuthkit.autopsy.mainui.datamodel.events; +import org.sleuthkit.datamodel.BlackboardArtifact; + /** * An event for an artifact added or changed of a particular type possibly for a * particular data source. @@ -25,8 +27,8 @@ package org.sleuthkit.autopsy.mainui.datamodel.events; public class AnalysisResultSetEvent extends AnalysisResultEvent { private final String setName; - public AnalysisResultSetEvent(String setName, long artifactTypeId, long dataSourceId) { - super(artifactTypeId, dataSourceId); + public AnalysisResultSetEvent(String setName, BlackboardArtifact.Type artifactType, long dataSourceId) { + super(artifactType, dataSourceId); this.setName = setName; } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/BlackboardArtifactEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/BlackboardArtifactEvent.java index 07231ce318..b5bbcec03e 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/BlackboardArtifactEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/BlackboardArtifactEvent.java @@ -18,20 +18,23 @@ */ package org.sleuthkit.autopsy.mainui.datamodel.events; +import java.util.Objects; +import org.sleuthkit.datamodel.BlackboardArtifact; + /** * An event for an 
artifact added in a particular type. */ public class BlackboardArtifactEvent implements DAOEvent { - private final long artifactTypeId; + private final BlackboardArtifact.Type artifactType; private final long dataSourceId; - BlackboardArtifactEvent(long artifactTypeId, long dataSourceId) { - this.artifactTypeId = artifactTypeId; + BlackboardArtifactEvent(BlackboardArtifact.Type artifactType, long dataSourceId) { + this.artifactType = artifactType; this.dataSourceId = dataSourceId; } - public long getArtifactTypeId() { - return artifactTypeId; + public BlackboardArtifact.Type getArtifactType() { + return artifactType; } public long getDataSourceId() { @@ -41,8 +44,8 @@ public class BlackboardArtifactEvent implements DAOEvent { @Override public int hashCode() { int hash = 5; - hash = 67 * hash + (int) (this.artifactTypeId ^ (this.artifactTypeId >>> 32)); - hash = 67 * hash + (int) (this.dataSourceId ^ (this.dataSourceId >>> 32)); + hash = 17 * hash + Objects.hashCode(this.artifactType); + hash = 17 * hash + (int) (this.dataSourceId ^ (this.dataSourceId >>> 32)); return hash; } @@ -58,15 +61,16 @@ public class BlackboardArtifactEvent implements DAOEvent { return false; } final BlackboardArtifactEvent other = (BlackboardArtifactEvent) obj; - if (this.artifactTypeId != other.artifactTypeId) { + if (this.dataSourceId != other.dataSourceId) { return false; } - if (this.dataSourceId != other.dataSourceId) { + if (!Objects.equals(this.artifactType, other.artifactType)) { return false; } return true; } + @Override public Type getType() { return Type.RESULT; diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOAggregateEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOAggregateEvent.java index adb5d905a9..f00832c922 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOAggregateEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOAggregateEvent.java @@ -26,21 +26,21 @@ import java.util.Collections; 
*/ public class DAOAggregateEvent { - private final Collection objects; + private final Collection objects; /** * Main constructor. * * @param objects The list of events in this aggregate event. */ - public DAOAggregateEvent(Collection objects) { + public DAOAggregateEvent(Collection objects) { this.objects = Collections.unmodifiableCollection(objects); } /** * @return The events in this aggregate event. */ - public Collection getEvents() { + public Collection getEvents() { return objects; } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventBatcher.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventBatcher.java index 005f5707f5..034dd59243 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventBatcher.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventBatcher.java @@ -24,6 +24,7 @@ import java.util.HashSet; import java.util.Set; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.TimeUnit; +import org.apache.commons.collections4.CollectionUtils; /** * @@ -91,9 +92,25 @@ public class DAOEventBatcher { * @param events The events. */ public void enqueueAllEvents(Collection events) { + if (CollectionUtils.isNotEmpty(events)) { + synchronized (this.eventListLock) { + this.aggregateEvents.addAll(events); + verifyRunning(); + } + } + } + + /** + * Flushes any currently batched events emptying queue of batched events. + * + * @return The flushed events. + */ + public Collection flushEvents() { synchronized (this.eventListLock) { - this.aggregateEvents.addAll(events); - verifyRunning(); + Collection evtsToFire = this.aggregateEvents; + this.aggregateEvents = new HashSet<>(); + this.isRunning = false; + return evtsToFire; } } @@ -101,13 +118,6 @@ public class DAOEventBatcher { * Fires all events and clears batch. 
*/ private void fireEvents() { - Collection evtsToFire; - synchronized (this.eventListLock) { - evtsToFire = this.aggregateEvents; - this.aggregateEvents = new HashSet<>(); - this.isRunning = false; - } - - this.eventsHandler.handle(evtsToFire); + this.eventsHandler.handle(flushEvents()); } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DataArtifactEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DataArtifactEvent.java index 18537de574..3cbd809414 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DataArtifactEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DataArtifactEvent.java @@ -18,13 +18,15 @@ */ package org.sleuthkit.autopsy.mainui.datamodel.events; +import org.sleuthkit.datamodel.BlackboardArtifact; + /** * An event for an artifact added or changed of a particular type possibly for a * particular data source. */ public class DataArtifactEvent extends BlackboardArtifactEvent { - public DataArtifactEvent(long artifactTypeId, long dataSourceId) { - super(artifactTypeId, dataSourceId); + public DataArtifactEvent(BlackboardArtifact.Type artifactType, long dataSourceId) { + super(artifactType, dataSourceId); } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/KeywordHitEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/KeywordHitEvent.java index 4d8ce6030e..6141ac7805 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/KeywordHitEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/KeywordHitEvent.java @@ -18,16 +18,19 @@ */ package org.sleuthkit.autopsy.mainui.datamodel.events; +import org.sleuthkit.datamodel.BlackboardArtifact; + /** * An event for an artifact added or changed of a particular type possibly for a * particular data source. 
*/ public class KeywordHitEvent extends AnalysisResultSetEvent { + private final String regex; private final String match; - public KeywordHitEvent(String regex, String match, String setName, long artifactTypeId, long dataSourceId) { - super(setName, artifactTypeId, dataSourceId); + public KeywordHitEvent(String regex, String match, String setName, BlackboardArtifact.Type artifactType, long dataSourceId) { + super(setName, artifactType, dataSourceId); this.regex = regex; this.match = match; } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEvent.java index f36cfe9ed7..48c3706de5 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEvent.java @@ -19,19 +19,22 @@ package org.sleuthkit.autopsy.mainui.datamodel.events; import java.util.Objects; - +import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactSearchParam; +import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO; +import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; public class TreeEvent implements DAOEvent { - private final DAOEvent daoEvent; + + private final TreeItemDTO itemRecord; private final boolean determinate; - public TreeEvent(DAOEvent daoEvent, boolean determinate) { - this.daoEvent = daoEvent; + public TreeEvent(TreeItemDTO itemRecord, boolean determinate) { + this.itemRecord = itemRecord; this.determinate = determinate; } - public DAOEvent getDaoEvent() { - return daoEvent; + public TreeItemDTO getItemRecord() { + return itemRecord; } public boolean isDeterminate() { @@ -40,9 +43,9 @@ public class TreeEvent implements DAOEvent { @Override public int hashCode() { - int hash = 3; - hash = 41 * hash + Objects.hashCode(this.daoEvent); - hash = 41 * hash + (this.determinate ? 
1 : 0); + int hash = 7; + hash = 89 * hash + Objects.hashCode(this.itemRecord); + hash = 89 * hash + (this.determinate ? 1 : 0); return hash; } @@ -61,16 +64,16 @@ public class TreeEvent implements DAOEvent { if (this.determinate != other.determinate) { return false; } - if (!Objects.equals(this.daoEvent, other.daoEvent)) { + if (!Objects.equals(this.itemRecord, other.itemRecord)) { return false; } return true; } + + @Override public Type getType() { return Type.TREE; } - - } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEventTimer.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEventTimedCache.java similarity index 50% rename from Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEventTimer.java rename to Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEventTimedCache.java index 41c7ac5ad7..50e996232b 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEventTimer.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEventTimedCache.java @@ -18,39 +18,27 @@ */ package org.sleuthkit.autopsy.mainui.datamodel.events; -import com.google.common.util.concurrent.ThreadFactoryBuilder; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.concurrent.ScheduledFuture; -import java.util.concurrent.ScheduledThreadPoolExecutor; -import java.util.concurrent.TimeUnit; -public class TreeEventTimer { - - public interface TreeEventHandler { - - void handleEvents(Collection events, boolean determinate); - } - - private final Map eventTimeouts = new HashMap<>(); +public class TreeEventTimedCache { + private static final long DEFAULT_TIMEOUT_MILLIS = 2 * 60 * 1000; + private final Object timeoutLock = new Object(); - private ScheduledFuture cancellableFuture; + private final Map eventTimeouts = new HashMap<>(); - private final TreeEventHandler eventsHandler; private final long timeoutMillis; - 
private final long watchResolutionMillis; - private final ScheduledThreadPoolExecutor timeoutExecutor - = new ScheduledThreadPoolExecutor(1, - new ThreadFactoryBuilder().setNameFormat(DAOEventBatcher.class.getName()).build()); - - public TreeEventTimer(TreeEventHandler eventsHandler, long timeoutMillis, long checkResolutionMillis) { - this.eventsHandler = eventsHandler; + + public TreeEventTimedCache() { + this(DEFAULT_TIMEOUT_MILLIS); + } + + public TreeEventTimedCache(long timeoutMillis) { this.timeoutMillis = timeoutMillis; - this.watchResolutionMillis = checkResolutionMillis; } private long getCurTime() { @@ -61,13 +49,11 @@ public class TreeEventTimer { return getCurTime() + timeoutMillis; } - public void enqueueAll(List events) { - List updateToIndeterminate = new ArrayList<>(); + public Collection enqueueAll(Collection events) { + Collection updateToIndeterminate = new ArrayList<>(); synchronized (this.timeoutLock) { - boolean needsWatch = this.eventTimeouts.isEmpty(); for (T event : events) { - // GVDTODO do we need to update all? 
this.eventTimeouts.compute(event, (k, v) -> { if (v == null) { updateToIndeterminate.add(event); @@ -75,41 +61,30 @@ public class TreeEventTimer { return getTimeoutTime(); }); } - - if (needsWatch) { - this.cancellableFuture = this.timeoutExecutor.scheduleAtFixedRate( - () -> handleEventTimeouts(), - this.watchResolutionMillis, - this.watchResolutionMillis, - TimeUnit.MILLISECONDS); - } } - if (!updateToIndeterminate.isEmpty()) { - this.eventsHandler.handleEvents(updateToIndeterminate, false); - } + return updateToIndeterminate; } - private void handleEventTimeouts() { + public Collection getEventTimeouts() { long curTime = getCurTime(); List toUpdate = new ArrayList<>(); synchronized (this.timeoutLock) { - if (Thread.interrupted()) { - return; - } - this.eventTimeouts.forEach((k, v) -> { if (v >= curTime) { toUpdate.add(k); this.eventTimeouts.remove(k); } }); - - if (this.eventTimeouts.isEmpty()) { - this.cancellableFuture.cancel(true); - } } + return toUpdate; + } - this.eventsHandler.handleEvents(toUpdate, true); + public Collection flushEvents() { + synchronized (this.timeoutLock) { + List toRet = new ArrayList<>(eventTimeouts.keySet()); + eventTimeouts.clear(); + return toRet; + } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java index 1ce6c5d8fd..12ce426a89 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java @@ -24,8 +24,9 @@ import org.sleuthkit.autopsy.datamodel.utils.IconsUtil; import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.MainDAO; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO; -import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; +import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; import 
org.sleuthkit.autopsy.mainui.datamodel.events.DataArtifactEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; import org.sleuthkit.datamodel.BlackboardArtifact; /** @@ -55,39 +56,20 @@ public class DataArtifactTypeFactory extends TreeChildFactory getInvalidatedChild(TreeEvent daoEvt) { + if (daoEvt.getSearchParams() instanceof DataArtifactSearchParam) { + DataArtifactEvent dataArtifactEvent = (DataArtifactEvent) daoEvt.getDaoEvent(); + if (this.dataSourceId == null || this.dataSourceId == dataArtifactEvent.getDataSourceId()) { + return new TreeItemDTO + } + } + return null; } -// @Override -// public boolean isRefreshRequired(PropertyChangeEvent evt) { -// String eventType = evt.getPropertyName(); -// if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) { -// /** -// * This is a stop gap measure until a different way of handling the -// * closing of cases is worked out. Currently, remote events may be -// * received for a case that is already closed. -// */ -// try { -// Case.getCurrentCaseThrows(); -// /** -// * Due to some unresolved issues with how cases are closed, it -// * is possible for the event to have a null oldValue if the -// * event is a remote event. -// */ -// final ModuleDataEvent event = (ModuleDataEvent) evt.getOldValue(); -// if (null != event && Category.DATA_ARTIFACT.equals(event.getBlackboardArtifactType().getCategory()) -// && !(DataArtifactDAO.getIgnoredTreeTypes().contains(event.getBlackboardArtifactType()))) { -// return true; -// } -// } catch (NoCurrentCaseException notUsed) { -// /** -// * Case is closed, do nothing. -// */ -// } -// } -// return false; -// } + @Override + public int compare(DataArtifactSearchParam o1, DataArtifactSearchParam o2) { + return o1.getArtifactType().getDisplayName().compareTo(o2.getArtifactType().getDisplayName()); + } /** * Display name and count of a data artifact type in the tree. 
diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java index 1859f19b51..5cbded1b2f 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java @@ -22,12 +22,12 @@ import com.google.common.collect.MapMaker; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.util.ArrayList; +import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutionException; import java.util.logging.Level; -import java.util.stream.Collectors; import org.openide.nodes.ChildFactory; import org.openide.nodes.Node; import org.openide.util.WeakListeners; @@ -42,7 +42,7 @@ import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; /** * Factory for populating tree with results. */ -public abstract class TreeChildFactory extends ChildFactory.Detachable { +public abstract class TreeChildFactory extends ChildFactory.Detachable implements Comparator { private static final Logger logger = Logger.getLogger(TreeChildFactory.class.getName()); @@ -52,17 +52,14 @@ public abstract class TreeChildFactory extends ChildFactory.Detachable item = getInvalidatedChild(treeEvt); + if (item != null) { + if (treeEvt.isDeterminate()) { + update(); + break; + } else { + setIndeterminate(item); } - break; } } } @@ -72,23 +69,30 @@ public abstract class TreeChildFactory extends ChildFactory.Detachable> typeNodeMap = new MapMaker().weakValues().makeMap(); + private final Object resultsUpdateLock = new Object(); + private TreeResultsDTO curResults = null; + private List> curItemsList = new ArrayList<>(); private Map> idMapping = new HashMap<>(); @Override protected boolean createKeys(List toPopulate) { - if (curResults == null) { - try { - updateData(); - } catch (IllegalArgumentException | ExecutionException ex) { - 
logger.log(Level.WARNING, "An error occurred while fetching keys", ex); - return false; + List> itemsList; + synchronized (resultsUpdateLock) { + if (curResults == null) { + try { + updateData(); + } catch (IllegalArgumentException | ExecutionException ex) { + logger.log(Level.WARNING, "An error occurred while fetching keys", ex); + return false; + } } + itemsList = curItemsList; } // update existing cached nodes List curResultIds = new ArrayList<>(); - for (TreeItemDTO dto : curResults.getItems()) { + for (TreeItemDTO dto : itemsList) { TreeNode currentlyCached = typeNodeMap.get(dto.getId()); if (currentlyCached != null) { currentlyCached.update(dto); @@ -111,6 +115,33 @@ public abstract class TreeChildFactory extends ChildFactory.Detachable item) { + TreeNode cachedTreeNode = this.typeNodeMap.get(item.getId()); + if (cachedTreeNode == null) { + synchronized (resultsUpdateLock) { + // add to id mapping + this.idMapping.put(item.getId(), item); + + // insert in sorted position + int insertIndex = 0; + for (; insertIndex < this.curItemsList.size(); insertIndex++) { + if (this.compare(item.getTypeData(), this.curItemsList.get(insertIndex).getTypeData()) < 0) { + break; + } + } + this.curItemsList.add(insertIndex, item); + } + this.refresh(false); + } else { + cachedTreeNode.update(item); + } + } + /** * Updates local data by fetching data from the DAO's. 
* @@ -118,10 +149,18 @@ public abstract class TreeChildFactory extends ChildFactory.Detachable item.getId(), item -> item, (item1, item2) -> item1)); + synchronized (resultsUpdateLock) { + this.curResults = getChildResults(); + Map> idMapping = new HashMap<>(); + List> curItemsList = new ArrayList<>(); + for (TreeItemDTO item : this.curResults.getItems()) { + idMapping.put(item.getId(), item); + curItemsList.add(item); + } + this.idMapping = idMapping; + this.curItemsList = curItemsList; + } } /** @@ -141,9 +180,13 @@ public abstract class TreeChildFactory extends ChildFactory.Detachable extends ChildFactory.Detachable extends ChildFactory.Detachable getChildResults() throws IllegalArgumentException, ExecutionException; - protected abstract boolean isChildInvalidating(DAOEvent daoEvt); + protected abstract TreeItemDTO getInvalidatedChild(TreeEvent daoEvt); } From 753d0ea603bd560d785f36f25d10b83819c7ea51 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Tue, 23 Nov 2021 17:43:20 -0500 Subject: [PATCH 078/142] updates --- .../mainui/datamodel/DataArtifactDAO.java | 24 ++--- .../autopsy/mainui/datamodel/TreeCount.java | 53 ----------- .../mainui/datamodel/TreeResultsDTO.java | 89 +++++++++++++++++-- .../mainui/datamodel/events/TreeEvent.java | 16 ++-- .../mainui/nodes/DataArtifactTypeFactory.java | 11 +-- .../mainui/nodes/TreeChildFactory.java | 15 +++- .../autopsy/mainui/nodes/TreeNode.java | 15 +--- 7 files changed, 127 insertions(+), 96 deletions(-) delete mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeCount.java diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index fb95f9bc75..9545b5576b 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -42,6 +42,8 @@ import java.util.stream.Stream; import org.apache.commons.lang3.tuple.Pair; 
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeDisplayCount; +import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEventTimedCache; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; @@ -151,14 +153,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { // get row dto's sorted by display name Map typeCounts = getCounts(BlackboardArtifact.Category.DATA_ARTIFACT, dataSourceId); List> treeItemRows = typeCounts.entrySet().stream() - .map(entry -> { - return new TreeResultsDTO.TreeItemDTO<>( - BlackboardArtifact.Category.DATA_ARTIFACT.name(), - new DataArtifactSearchParam(entry.getKey(), dataSourceId), - entry.getKey().getTypeID(), - entry.getKey().getDisplayName(), - entry.getValue()); - }) + .map(entry -> getTreeItem(entry.getKey(), dataSourceId, TreeDisplayCount.getDeterminate(entry.getValue()))) .sorted(Comparator.comparing(countRow -> countRow.getDisplayName())) .collect(Collectors.toList()); @@ -226,13 +221,22 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { } List newTreeEvents = this.treeCache.enqueueAll(dataArtifactEvents).stream() - .map(daoEvt -> new TreeEvent(new DataArtifactSearchParam(daoEvt.getArtifactType(), daoEvt.getDataSourceId()), false)) + .map(daoEvt -> new TreeEvent(getTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.INDETERMINATE), false)) .collect(Collectors.toList()); return Stream.of(dataArtifactEvents, newTreeEvents) .flatMap((lst) -> lst.stream()) .collect(Collectors.toList()); } + + private TreeItemDTO getTreeItem(BlackboardArtifact.Type artifactType, long dataSourceId, TreeDisplayCount displayCount) { + return new TreeResultsDTO.TreeItemDTO<>( + BlackboardArtifact.Category.DATA_ARTIFACT.name(), + new 
DataArtifactSearchParam(artifactType, dataSourceId), + artifactType.getTypeID(), + artifactType.getDisplayName(), + displayCount); + } @Override Collection flushEvents() { @@ -242,7 +246,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { @Override Collection shouldRefreshTree() { return this.treeCache.getEventTimeouts().stream() - .map(dataEvt -> new TreeEvent(dataEvt, true)) + .map(daoEvt -> new TreeEvent(getTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.INDETERMINATE), true)) .collect(Collectors.toList()); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeCount.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeCount.java deleted file mode 100644 index 4a88de5979..0000000000 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeCount.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2021 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.mainui.datamodel; - -/** - * Captures the count to be displayed in the UI. 
- */ -public class TreeCount { - public enum Type { - DETERMINATE, - INDETERMINATE, - NOT_SHOWN - } - - private final Type type; - private final long count; - - public static final TreeCount INDETERMINATE = new TreeCount(Type.INDETERMINATE, -1); - public static final TreeCount NOT_SHOWN = new TreeCount(Type.NOT_SHOWN, -1); - - public static TreeCount getDeterminate(long count) { - return new TreeCount(Type.DETERMINATE, count); - } - - private TreeCount(Type type, long count) { - this.type = type; - this.count = count; - } - - public Type getType() { - return type; - } - - public long getCount() { - return count; - } -} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java index 2d42b4464e..40256583db 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java @@ -19,6 +19,8 @@ package org.sleuthkit.autopsy.mainui.datamodel; import java.util.List; +import java.util.Objects; +import org.python.icu.text.MessageFormat; /** * A list of items to display in the tree. @@ -43,6 +45,84 @@ public class TreeResultsDTO { return items; } + /** + * Captures the count to be displayed in the UI. 
+ */ + public static class TreeDisplayCount { + + public enum Type { + DETERMINATE, + INDETERMINATE, + NOT_SHOWN + } + + private final Type type; + private final long count; + + public static final TreeDisplayCount INDETERMINATE = new TreeDisplayCount(Type.INDETERMINATE, -1); + public static final TreeDisplayCount NOT_SHOWN = new TreeDisplayCount(Type.NOT_SHOWN, -1); + + public static TreeDisplayCount getDeterminate(long count) { + return new TreeDisplayCount(Type.DETERMINATE, count); + } + + private TreeDisplayCount(Type type, long count) { + this.type = type; + this.count = count; + } + + public Type getType() { + return type; + } + + public long getCount() { + return count; + } + + public String getDisplaySuffix() { + switch (this.type) { + case DETERMINATE: + return " (" + count + ")"; + case INDETERMINATE: + return "..."; + case NOT_SHOWN: + default: + return ""; + } + } + + @Override + public int hashCode() { + int hash = 5; + hash = 97 * hash + Objects.hashCode(this.type); + hash = 97 * hash + (int) (this.count ^ (this.count >>> 32)); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final TreeDisplayCount other = (TreeDisplayCount) obj; + if (this.count != other.count) { + return false; + } + if (this.type != other.type) { + return false; + } + return true; + } + + + } + /** * A result providing a category and a count for that category. Equals and * hashCode are based on id, type id, and type data. @@ -51,7 +131,7 @@ public class TreeResultsDTO { private final String displayName; private final String typeId; - private final Long count; + private final TreeDisplayCount count; private final T typeData; private final Object id; @@ -68,7 +148,7 @@ public class TreeResultsDTO { * @param count The count of results for this row or null if not * applicable. 
*/ - public TreeItemDTO(String typeId, T typeData, Object id, String displayName, Long count) { + public TreeItemDTO(String typeId, T typeData, Object id, String displayName, TreeDisplayCount count) { this.typeId = typeId; this.id = id; this.displayName = displayName; @@ -86,7 +166,7 @@ public class TreeResultsDTO { /** * @return The count of results for this row or null if not applicable. */ - public Long getCount() { + public TreeDisplayCount getDisplayCount() { return count; } @@ -114,7 +194,6 @@ public class TreeResultsDTO { public String getTypeId() { return typeId; } - - + } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEvent.java index 48c3706de5..9207a7f152 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEvent.java @@ -19,33 +19,31 @@ package org.sleuthkit.autopsy.mainui.datamodel.events; import java.util.Objects; -import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactSearchParam; -import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; public class TreeEvent implements DAOEvent { private final TreeItemDTO itemRecord; - private final boolean determinate; + private final boolean refreshRequired; - public TreeEvent(TreeItemDTO itemRecord, boolean determinate) { + public TreeEvent(TreeItemDTO itemRecord, boolean refreshRequired) { this.itemRecord = itemRecord; - this.determinate = determinate; + this.refreshRequired = refreshRequired; } public TreeItemDTO getItemRecord() { return itemRecord; } - public boolean isDeterminate() { - return determinate; + public boolean isRefreshRequired() { + return refreshRequired; } @Override public int hashCode() { int hash = 7; hash = 89 * hash + Objects.hashCode(this.itemRecord); - hash = 89 * hash + (this.determinate ? 
1 : 0); + hash = 89 * hash + (this.refreshRequired ? 1 : 0); return hash; } @@ -61,7 +59,7 @@ public class TreeEvent implements DAOEvent { return false; } final TreeEvent other = (TreeEvent) obj; - if (this.determinate != other.determinate) { + if (this.refreshRequired != other.refreshRequired) { return false; } if (!Objects.equals(this.itemRecord, other.itemRecord)) { diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java index 12ce426a89..671e3c4d13 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java @@ -56,11 +56,12 @@ public class DataArtifactTypeFactory extends TreeChildFactory getInvalidatedChild(TreeEvent daoEvt) { - if (daoEvt.getSearchParams() instanceof DataArtifactSearchParam) { - DataArtifactEvent dataArtifactEvent = (DataArtifactEvent) daoEvt.getDaoEvent(); - if (this.dataSourceId == null || this.dataSourceId == dataArtifactEvent.getDataSourceId()) { - return new TreeItemDTO + protected TreeItemDTO getInvalidatedChild(TreeEvent daoEvt) { + if (daoEvt.getItemRecord().getTypeData() instanceof DataArtifactSearchParam) { + TreeItemDTO originalTreeItem = (TreeItemDTO) daoEvt.getItemRecord(); + DataArtifactSearchParam searchParam = originalTreeItem.getTypeData(); + if (this.dataSourceId == null || this.dataSourceId == searchParam.getDataSourceId()) { + return TreeChildFactory.getUpdatedTreeData(originalTreeItem, new DataArtifactSearchParam(searchParam.getArtifactType(), searchParam.getDataSourceId())); } } return null; diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java index 5cbded1b2f..61d0761614 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java @@ 
-54,11 +54,11 @@ public abstract class TreeChildFactory extends ChildFactory.Detachable item = getInvalidatedChild(treeEvt); if (item != null) { - if (treeEvt.isDeterminate()) { + if (treeEvt.isRefreshRequired()) { update(); break; } else { - setIndeterminate(item); + updateNodeData(item); } } } @@ -120,7 +120,7 @@ public abstract class TreeChildFactory extends ChildFactory.Detachable item) { + protected void updateNodeData(TreeItemDTO item) { TreeNode cachedTreeNode = this.typeNodeMap.get(item.getId()); if (cachedTreeNode == null) { synchronized (resultsUpdateLock) { @@ -223,6 +223,15 @@ public abstract class TreeChildFactory extends ChildFactory.Detachable TreeItemDTO getUpdatedTreeData(TreeItemDTO original, T updatedData) { + return new TreeItemDTO<>( + original.getTypeId(), + updatedData, + original.getId(), + original.getDisplayName(), + original.getDisplayCount()); + } /** * Creates a TreeNode given the tree item data. diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeNode.java index 00c90cb0e5..782e9af691 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeNode.java @@ -21,6 +21,7 @@ package org.sleuthkit.autopsy.mainui.nodes; import org.sleuthkit.autopsy.corecomponents.SelectionResponder; import java.text.MessageFormat; import java.util.logging.Level; +import org.bouncycastle.util.Objects; import org.openide.nodes.AbstractNode; import org.openide.nodes.Children; import org.openide.util.Lookup; @@ -89,14 +90,6 @@ public abstract class TreeNode extends AbstractNode implements SelectionRespo return itemData; } - /** - * Sets this node to an indeterminate state. - */ - void setIndeterminate() { - String baseName = this.itemData == null ? this.itemData.getDisplayName() : ""; - this.setDisplayName(baseName + "..."); - } - /** * Sets the display name of the node to include the display name and count * of the item. 
@@ -108,10 +101,10 @@ public abstract class TreeNode extends AbstractNode implements SelectionRespo // update display name only if there is a change. if (prevData == null || !prevData.getDisplayName().equals(curData.getDisplayName()) - || prevData.getCount() != curData.getCount()) { - String displayName = curData.getCount() == null + || !Objects.areEqual(prevData.getDisplayCount(), curData.getDisplayCount())) { + String displayName = curData.getDisplayCount() == null ? curData.getDisplayName() - : MessageFormat.format("{0} ({1})", curData.getDisplayName(), curData.getCount()); + : curData.getDisplayName() + curData.getDisplayCount().getDisplaySuffix(); this.setDisplayName(displayName); } From 4b8ecc65a825a041728255be959967c3d5068775 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Tue, 23 Nov 2021 19:19:22 -0500 Subject: [PATCH 079/142] initial work on tree dao events --- .../mainui/datamodel/AnalysisResultDAO.java | 103 +++++++++------ .../mainui/datamodel/CommAccountsDAO.java | 40 ++++-- .../mainui/datamodel/FileSystemDAO.java | 95 ++++++++------ .../mainui/datamodel/OsAccountsDAO.java | 29 ++-- .../autopsy/mainui/datamodel/TagsDAO.java | 30 +++-- .../mainui/datamodel/TreeResultsDTO.java | 1 - .../autopsy/mainui/datamodel/ViewsDAO.java | 78 ++++++----- .../nodes/AnalysisResultTypeFactory.java | 124 +++++++----------- .../mainui/nodes/DataArtifactTypeFactory.java | 1 - .../mainui/nodes/ViewsTypeFactory.java | 84 +++++++----- 10 files changed, 324 insertions(+), 261 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java index 336db9a8e6..677940ad81 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java @@ -48,6 +48,7 @@ import org.python.google.common.collect.ImmutableSet; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import 
org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeDisplayCount; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventUtils; import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; @@ -139,7 +140,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { public static Set getIgnoredTreeTypes() { return BlackboardArtifactDAO.getIgnoredTreeTypes(); } - + @SuppressWarnings("deprecation") private static final Set STANDARD_SET_TYPES = ImmutableSet.of( BlackboardArtifact.Type.TSK_INTERESTING_ITEM.getTypeID(), @@ -154,7 +155,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { private final Cache, AnalysisResultTableSearchResultsDTO> keywordHitCache = CacheBuilder.newBuilder().maximumSize(1000).build(); private final TreeEventTimedCache treeCache = new TreeEventTimedCache<>(); - + private AnalysisResultTableSearchResultsDTO fetchAnalysisResultsForTable(SearchParams cacheKey) throws NoCurrentCaseException, TskCoreException { SleuthkitCase skCase = getCase(); @@ -360,14 +361,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { // get row dto's sorted by display name Map typeCounts = getCounts(BlackboardArtifact.Category.ANALYSIS_RESULT, dataSourceId); List> treeItemRows = typeCounts.entrySet().stream() - .map(entry -> { - return new TreeResultsDTO.TreeItemDTO<>( - BlackboardArtifact.Category.ANALYSIS_RESULT.name(), - new AnalysisResultSearchParam(entry.getKey(), dataSourceId), - entry.getKey().getTypeID(), - entry.getKey().getDisplayName(), - entry.getValue()); - }) + .map(entry -> getTreeItem(entry.getKey(), dataSourceId, TreeDisplayCount.getDeterminate(entry.getValue()))) .sorted(Comparator.comparing(countRow -> countRow.getDisplayName())) .collect(Collectors.toList()); @@ -379,6 +373,15 @@ public class AnalysisResultDAO extends 
BlackboardArtifactDAO { } } + private TreeItemDTO getTreeItem(BlackboardArtifact.Type type, Long dataSourceId, TreeDisplayCount displayCount) { + return new TreeItemDTO<>( + BlackboardArtifact.Category.ANALYSIS_RESULT.name(), + new AnalysisResultSearchParam(type, dataSourceId), + type.getTypeID(), + type.getDisplayName(), + displayCount); + } + /** * * @param type The artifact type to filter on. @@ -460,18 +463,28 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { .filter(entry -> nullSetName != null || entry.getKey() != null) .sorted((a, b) -> compareSetStrings(a.getKey(), b.getKey())) .map(entry -> { - return new TreeItemDTO<>( - type.getTypeName(), - new AnalysisResultSetSearchParam(type, dataSourceId, entry.getKey()), - entry.getKey() == null ? 0 : entry.getKey(), + return getSetTreeItem(type, + dataSourceId, + entry.getKey(), entry.getKey() == null ? nullSetName : entry.getKey(), - entry.getValue()); + TreeDisplayCount.getDeterminate(entry.getValue())); }) .collect(Collectors.toList()); return new TreeResultsDTO<>(allSets); } + private TreeItemDTO getSetTreeItem(BlackboardArtifact.Type type, + long dataSourceId, String setName, String displayName, TreeDisplayCount displayCount) { + + return new TreeItemDTO<>( + type.getTypeName(), + new AnalysisResultSetSearchParam(type, dataSourceId, setName), + setName == null ? 0 : setName, + displayName, + displayCount); + } + /** * Compares set strings to properly order for the tree. 
* @@ -603,13 +616,15 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { break; } - items.add(new TreeItemDTO<>( + TreeItemDTO treeItem = new TreeItemDTO<>( "KEYWORD_SEARCH_TERMS", new KeywordSearchTermParams(setName, searchTerm, searchType, hasChildren, dataSourceId), searchTermModified, searchTermModified, - count - )); + TreeDisplayCount.getDeterminate(count) + ); + + items.add(treeItem); } } catch (SQLException ex) { logger.log(Level.WARNING, "An error occurred while fetching results from result set.", ex); @@ -698,7 +713,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { new KeywordMatchParams(setName, regexStr, keyword, searchType, dataSourceId), keyword, keyword == null ? "" : keyword, - count)); + TreeDisplayCount.getDeterminate(count))); } } catch (SQLException ex) { logger.log(Level.WARNING, "An error occurred while fetching results from result set.", ex); @@ -722,11 +737,10 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { @Override Collection processEvent(PropertyChangeEvent evt) { // get a grouping of artifacts mapping the artifact type id to data source id. - Map> analysisResultMap = new HashMap<>(); - Map, Set> setMap = new HashMap<>(); + Map> analysisResultMap = new HashMap<>(); + Map, Set> setMap = new HashMap<>(); Map> keywordHitsMap = new HashMap<>(); - ModuleDataEvent dataEvt = DAOEventUtils.getModuleDataFromEvt(evt); if (dataEvt != null) { for (BlackboardArtifact art : dataEvt.getArtifacts()) { @@ -736,11 +750,11 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } else if (STANDARD_SET_TYPES.contains(art.getArtifactTypeID())) { BlackboardAttribute setAttr = art.getAttribute(BlackboardAttribute.Type.TSK_SET_NAME); String setName = setAttr == null ? 
null : setAttr.getValueString(); - setMap.computeIfAbsent(Pair.of(art.getArtifactTypeID(), setName), (k) -> new HashSet<>()) + setMap.computeIfAbsent(Pair.of(art.getType(), setName), (k) -> new HashSet<>()) .add(art.getDataSourceObjectID()); } else if (BlackboardArtifact.Category.ANALYSIS_RESULT.equals(art.getType().getCategory())) { - analysisResultMap.computeIfAbsent(art.getArtifactTypeID(), (k) -> new HashSet<>()) + analysisResultMap.computeIfAbsent(art.getType(), (k) -> new HashSet<>()) .add(art.getDataSourceObjectID()); } } catch (TskCoreException ex) { @@ -749,7 +763,6 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } } - // don't continue if no relevant items found if (analysisResultMap.isEmpty() && setMap.isEmpty() && keywordHitsMap.isEmpty()) { return Collections.emptyList(); @@ -765,18 +778,19 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } /** - * Generate DAO events from digest of Autopsy events. + * Generate DAO events from digest of Autopsy events. * - * @param analysisResultMap Contains the analysis results that do not use - * a set name. A mapping of analysis result type ids to data - * sources where the results were created. + * @param analysisResultMap Contains the analysis results that do not use a + * set name. A mapping of analysis result type ids + * to data sources where the results were created. * @param resultsWithSetMap Contains the anlaysis results that do use a set - * name. A mapping of (analysis result type id, set name) to - * data sources where results were created. + * name. A mapping of (analysis result type id, set + * name) to data sources where results were + * created. * * @return The list of dao events. 
*/ - private List getDAOEvents(Map> analysisResultMap, Map, Set> resultsWithSetMap) { + private List getDAOEvents(Map> analysisResultMap, Map, Set> resultsWithSetMap) { // invalidate cache entries that are affected by events Stream analysisResultEvts = analysisResultMap.entrySet().stream() .flatMap(entry -> entry.getValue().stream().map(dsId -> new AnalysisResultEvent(entry.getKey(), dsId))); @@ -793,14 +807,15 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { /** * Clears cache entries given the provided digests of autopsy events. * - * @param analysisResultMap Contains the analysis results that do not use - * a set name. A mapping of analysis result type ids to data - * sources where the results were created. + * @param analysisResultMap Contains the analysis results that do not use a + * set name. A mapping of analysis result type ids + * to data sources where the results were created. * @param resultsWithSetMap Contains the anlaysis results that do use a set - * name. A mapping of (analysis result type id, set name) to - * data sources where results were created. + * name. A mapping of (analysis result type id, set + * name) to data sources where results were + * created. 
*/ - private void clearRelevantCacheEntries(Map> analysisResultMap, Map, Set> resultsWithSetMap) { + private void clearRelevantCacheEntries(Map> analysisResultMap, Map, Set> resultsWithSetMap) { ConcurrentMap, AnalysisResultTableSearchResultsDTO> arConcurrentMap = this.analysisResultCache.asMap(); arConcurrentMap.forEach((k, v) -> { BlackboardArtifactSearchParam searchParam = k.getParamData(); @@ -831,7 +846,17 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { @Override Collection shouldRefreshTree() { return this.treeCache.getEventTimeouts().stream() - .map(daoEvt -> new TreeEvent(daoEvt, true)) + .map(daoEvt -> { + // GVDTODO handle keyword items when integrated + if (daoEvt instanceof AnalysisResultSetEvent) { + AnalysisResultSetEvent setEvt = (AnalysisResultSetEvent) daoEvt; + return new TreeEvent(getSetTreeItem(setEvt.getArtifactType(), setEvt.getDataSourceId(), + setEvt.getSetName(), setEvt.getSetName() == null ? "" : setEvt.getSetName(), + TreeDisplayCount.INDETERMINATE), false); + } else { + return new TreeEvent(getTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.INDETERMINATE), false); + } + }) .collect(Collectors.toList()); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java index b2d9966e8b..64cc40d823 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java @@ -23,6 +23,7 @@ import com.google.common.cache.CacheBuilder; import java.beans.PropertyChangeEvent; import java.sql.SQLException; import java.util.Collection; +import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; @@ -44,6 +45,7 @@ import org.sleuthkit.autopsy.ingest.IngestManager; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import 
org.sleuthkit.autopsy.mainui.datamodel.events.CommAccountsEvent; import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.Account; import org.sleuthkit.datamodel.Blackboard; @@ -154,23 +156,35 @@ public class CommAccountsDAO extends AbstractDAO { } @Override - List processEvent(Collection evts) { + Collection flushEvents() { + // GVDTODO + return Collections.emptyList(); + } + + @Override + Collection shouldRefreshTree() { + // GVDTODO + return Collections.emptyList(); + } + + @Override + List processEvent(PropertyChangeEvent evt) { // maps account type to the data sources affected + // GVDTODO this can probably be rewritten now that it isn't handling a list of autopsy events Map> commAccountsAffected = new HashMap<>(); try { - for (PropertyChangeEvent evt : evts) { - String eventType = evt.getPropertyName(); - if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) { - ModuleDataEvent eventData = (ModuleDataEvent) evt.getOldValue(); - if (null != eventData - && eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID()) { - // check that the update is for the same account type - for (BlackboardArtifact artifact : eventData.getArtifacts()) { - BlackboardAttribute typeAttr = artifact.getAttribute(BlackboardAttribute.Type.TSK_ACCOUNT_TYPE); - commAccountsAffected.computeIfAbsent(typeAttr.getValueString(), (k) -> new HashSet<>()) - .add(artifact.getDataSourceObjectID()); - } + String eventType = evt.getPropertyName(); + if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) { + ModuleDataEvent eventData = (ModuleDataEvent) evt.getOldValue(); + if (null != eventData + && eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID()) { + + // check that the update is for the same account type + 
for (BlackboardArtifact artifact : eventData.getArtifacts()) { + BlackboardAttribute typeAttr = artifact.getAttribute(BlackboardAttribute.Type.TSK_ACCOUNT_TYPE); + commAccountsAffected.computeIfAbsent(typeAttr.getValueString(), (k) -> new HashSet<>()) + .add(artifact.getDataSourceObjectID()); } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java index f35b46b48d..2e754f3b81 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java @@ -24,6 +24,7 @@ import com.google.common.collect.ImmutableSet; import java.beans.PropertyChangeEvent; import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.List; @@ -59,6 +60,7 @@ import static org.sleuthkit.autopsy.mainui.datamodel.ViewsDAO.getExtensionMediaT import org.sleuthkit.autopsy.mainui.datamodel.events.FileSystemContentEvent; import org.sleuthkit.autopsy.mainui.datamodel.events.FileSystemHostEvent; import org.sleuthkit.autopsy.mainui.datamodel.events.FileSystemPersonEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Content; @@ -355,53 +357,64 @@ public class FileSystemDAO extends AbstractDAO { } @Override - List processEvent(Collection evts) { + Collection flushEvents() { + // GVDTODO + return Collections.emptyList(); + } + + @Override + Collection shouldRefreshTree() { + // GVDTODO + return Collections.emptyList(); + } + + @Override + List processEvent(PropertyChangeEvent evt) { + // GVDTODO these can probably be rewritten now that it isn't handling a collection of autopsy events Set affectedPersons = new HashSet<>(); Set affectedHosts = new HashSet<>(); Set affectedParentContent 
= new HashSet<>(); boolean refreshAllContent = false; - for (PropertyChangeEvent evt : evts) { - Content content = DAOEventUtils.getDerivedContentFromEvt(evt); - if (content != null) { - Content parentContent; - try { - parentContent = content.getParent(); - } catch (TskCoreException ex) { - logger.log(Level.WARNING, "Unable to get parent content of content with id: " + content.getId(), ex); - continue; - } - - if (parentContent == null) { - continue; - } - - if (invalidatesAllFileSystem(parentContent)) { - refreshAllContent = true; - } else { - affectedParentContent.add(parentContent.getId()); - } - } else if (evt instanceof DataSourceAddedEvent) { - Long hostId = getHostFromDs(((DataSourceAddedEvent) evt).getDataSource()); - if (hostId != null) { - affectedHosts.add(hostId); - } - } else if (evt instanceof DataSourceNameChangedEvent) { - Long hostId = getHostFromDs(((DataSourceNameChangedEvent) evt).getDataSource()); - if (hostId != null) { - affectedHosts.add(hostId); - } - } else if (evt instanceof HostsAddedEvent) { - // GVDTODO how best to handle host added? - } else if (evt instanceof HostsUpdatedEvent) { - // GVDTODO how best to handle host updated? - } else if (evt instanceof HostsAddedToPersonEvent) { - Person person = ((HostsAddedToPersonEvent) evt).getPerson(); - affectedPersons.add(person == null ? null : person.getPersonId()); - } else if (evt instanceof HostsRemovedFromPersonEvent) { - Person person = ((HostsRemovedFromPersonEvent) evt).getPerson(); - affectedPersons.add(person == null ? 
null : person.getPersonId()); + Content content = DAOEventUtils.getDerivedContentFromEvt(evt); + if (content != null) { + Content parentContent; + try { + parentContent = content.getParent(); + } catch (TskCoreException ex) { + logger.log(Level.WARNING, "Unable to get parent content of content with id: " + content.getId(), ex); + return Collections.emptyList(); } + + if (parentContent == null) { + return Collections.emptyList(); + } + + if (invalidatesAllFileSystem(parentContent)) { + refreshAllContent = true; + } else { + affectedParentContent.add(parentContent.getId()); + } + } else if (evt instanceof DataSourceAddedEvent) { + Long hostId = getHostFromDs(((DataSourceAddedEvent) evt).getDataSource()); + if (hostId != null) { + affectedHosts.add(hostId); + } + } else if (evt instanceof DataSourceNameChangedEvent) { + Long hostId = getHostFromDs(((DataSourceNameChangedEvent) evt).getDataSource()); + if (hostId != null) { + affectedHosts.add(hostId); + } + } else if (evt instanceof HostsAddedEvent) { + // GVDTODO how best to handle host added? + } else if (evt instanceof HostsUpdatedEvent) { + // GVDTODO how best to handle host updated? + } else if (evt instanceof HostsAddedToPersonEvent) { + Person person = ((HostsAddedToPersonEvent) evt).getPerson(); + affectedPersons.add(person == null ? null : person.getPersonId()); + } else if (evt instanceof HostsRemovedFromPersonEvent) { + Person person = ((HostsRemovedFromPersonEvent) evt).getPerson(); + affectedPersons.add(person == null ? 
null : person.getPersonId()); } final boolean triggerFullRefresh = refreshAllContent; diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java index 4996bd4dc9..facae00ba5 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java @@ -25,6 +25,7 @@ import java.beans.PropertyChangeEvent; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Optional; @@ -41,6 +42,7 @@ import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.TimeZoneUtils; import org.sleuthkit.autopsy.mainui.datamodel.events.OsAccountEvent; import org.sleuthkit.autopsy.mainui.datamodel.ContentRowDTO.OsAccountRowDTO; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.OsAccount; import org.sleuthkit.datamodel.TskCoreException; @@ -184,17 +186,26 @@ public class OsAccountsDAO extends AbstractDAO { } @Override - List processEvent(Collection evts) { - List daoEvts = evts.stream().filter(evt -> OS_EVENTS.contains(evt.getPropertyName())) - .map(evt -> new OsAccountEvent()) - .limit(1) - .collect(Collectors.toList()); + Collection flushEvents() { + // GVDTODO + return Collections.emptyList(); + } - if (!daoEvts.isEmpty()) { - this.searchParamsCache.invalidateAll(); + @Override + Collection shouldRefreshTree() { + // GVDTODO + return Collections.emptyList(); + } + + @Override + List processEvent(PropertyChangeEvent evt) { + if (!OS_EVENTS.contains(evt.getPropertyName())) { + return Collections.emptyList(); } - - return daoEvts; + + this.searchParamsCache.invalidateAll(); + + return Collections.singletonList(new OsAccountEvent()); } /** diff --git 
a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java index 86457eefbf..ee0eaf71c6 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java @@ -53,6 +53,7 @@ import org.sleuthkit.autopsy.core.UserPreferences; import org.sleuthkit.autopsy.coreutils.TimeZoneUtils; import org.sleuthkit.autopsy.mainui.datamodel.TagsSearchParams.TagType; import org.sleuthkit.autopsy.mainui.datamodel.events.TagsEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifactTag; @@ -313,17 +314,30 @@ public class TagsDAO extends AbstractDAO { } @Override - List processEvent(Collection evts) { + Collection flushEvents() { + // GVDTODO + return Collections.emptyList(); + } + + @Override + Collection shouldRefreshTree() { + // GVDTODO + return Collections.emptyList(); + } + + @Override + List processEvent(PropertyChangeEvent evt) { + // GVDTODO this may be rewritten simpler now that it isn't processing a list of events Map, Set>> mapping = new HashMap<>(); - for (PropertyChangeEvent evt : evts) { - // tag type, tag name id, data source id (or null if unknown) - Triple data = getTagData(evt); - if (data != null) { - mapping.computeIfAbsent(Pair.of(data.getLeft(), data.getMiddle()), k -> new HashSet<>()) - .add(Optional.ofNullable(data.getRight())); - } + + // tag type, tag name id, data source id (or null if unknown) + Triple data = getTagData(evt); + if (data != null) { + mapping.computeIfAbsent(Pair.of(data.getLeft(), data.getMiddle()), k -> new HashSet<>()) + .add(Optional.ofNullable(data.getRight())); } + // don't continue if no mapping entries if (mapping.isEmpty()) { return Collections.emptyList(); diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java 
b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java index 40256583db..f3d5d09a7c 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java @@ -20,7 +20,6 @@ package org.sleuthkit.autopsy.mainui.datamodel; import java.util.List; import java.util.Objects; -import org.python.icu.text.MessageFormat; /** * A list of items to display in the tree. diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java index 86545e4e2f..a18e229ade 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java @@ -48,11 +48,13 @@ import static org.sleuthkit.autopsy.core.UserPreferences.hideKnownFilesInViewsTr import static org.sleuthkit.autopsy.core.UserPreferences.hideSlackFilesInViewsTree; import org.sleuthkit.autopsy.datamodel.FileTypeExtensions; import org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO.ExtensionMediaType; +import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeDisplayCount; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventUtils; import org.sleuthkit.autopsy.mainui.datamodel.events.FileTypeExtensionsEvent; import org.sleuthkit.autopsy.mainui.datamodel.events.FileTypeMimeEvent; import org.sleuthkit.autopsy.mainui.datamodel.events.FileTypeSizeEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.CaseDbAccessManager.CaseDbPreparedStatement; @@ -348,7 +350,7 @@ public class ViewsDAO extends AbstractDAO { new FileTypeExtensionsSearchParams(entry.getKey(), dataSourceId), entry.getKey(), entry.getKey().getDisplayName(), - entry.getValue()); + 
TreeDisplayCount.getDeterminate(entry.getValue())); }) .sorted((a, b) -> a.getDisplayName().compareToIgnoreCase(b.getDisplayName())) .collect(Collectors.toList()); @@ -382,7 +384,7 @@ public class ViewsDAO extends AbstractDAO { new FileTypeSizeSearchParams(entry.getKey(), dataSourceId), entry.getKey(), entry.getKey().getDisplayName(), - entry.getValue()); + TreeDisplayCount.getDeterminate(entry.getValue())); }) .sorted((a, b) -> a.getDisplayName().compareToIgnoreCase(b.getDisplayName())) .collect(Collectors.toList()); @@ -467,7 +469,7 @@ public class ViewsDAO extends AbstractDAO { new FileTypeMimeSearchParams(entry.getKey(), dataSourceId), name, name, - entry.getValue()); + TreeDisplayCount.getDeterminate(entry.getValue())); }) .sorted((a, b) -> stringCompare(a.getTypeData().getMimeType(), b.getTypeData().getMimeType())) .collect(Collectors.toList()); @@ -653,44 +655,54 @@ public class ViewsDAO extends AbstractDAO { } @Override - List processEvent(Collection autopsyEvts) { + Collection flushEvents() { + // GVDTODO + return Collections.emptyList(); + } + + @Override + Collection shouldRefreshTree() { + // GVDTODO + return Collections.emptyList(); + } + + @Override + List processEvent(PropertyChangeEvent evt) { Map> fileExtensionDsMap = new HashMap<>(); Map>> mimeTypeDsMap = new HashMap<>(); Map> fileSizeDsMap = new HashMap<>(); - for (PropertyChangeEvent evt : autopsyEvts) { - AbstractFile af = DAOEventUtils.getFileFromEvt(evt); - if (af == null) { - continue; - } + AbstractFile af = DAOEventUtils.getFileFromEvt(evt); + if (af == null) { + return Collections.emptyList(); + } - // create an extension mapping if extension present - if (!StringUtils.isBlank(af.getNameExtension())) { - fileExtensionDsMap - .computeIfAbsent(af.getNameExtension(), (k) -> new HashSet<>()) - .add(af.getDataSourceObjectId()); - } + // create an extension mapping if extension present + if (!StringUtils.isBlank(af.getNameExtension())) { + fileExtensionDsMap + 
.computeIfAbsent(af.getNameExtension(), (k) -> new HashSet<>()) + .add(af.getDataSourceObjectId()); + } - // create a mime type mapping if mime type present - if (!StringUtils.isBlank(af.getMIMEType())) { - Pair mimePieces = getMimePieces(af.getMIMEType()); - mimeTypeDsMap - .computeIfAbsent(mimePieces.getKey(), (k) -> new HashMap<>()) - .computeIfAbsent(mimePieces.getValue(), (k) -> new HashSet<>()) - .add(af.getDataSourceObjectId()); - } + // create a mime type mapping if mime type present + if (!StringUtils.isBlank(af.getMIMEType())) { + Pair mimePieces = getMimePieces(af.getMIMEType()); + mimeTypeDsMap + .computeIfAbsent(mimePieces.getKey(), (k) -> new HashMap<>()) + .computeIfAbsent(mimePieces.getValue(), (k) -> new HashSet<>()) + .add(af.getDataSourceObjectId()); + } - // create a size mapping if size present - FileSizeFilter sizeFilter = Stream.of(FileSizeFilter.values()) - .filter(filter -> af.getSize() >= filter.getMinBound() && af.getSize() < filter.getMaxBound()) - .findFirst() - .orElse(null); + // create a size mapping if size present + FileSizeFilter sizeFilter = Stream.of(FileSizeFilter.values()) + .filter(filter -> af.getSize() >= filter.getMinBound() && af.getSize() < filter.getMaxBound()) + .findFirst() + .orElse(null); - if (sizeFilter != null) { - fileSizeDsMap - .computeIfAbsent(sizeFilter, (k) -> new HashSet<>()) - .add(af.getDataSourceObjectId()); - } + if (sizeFilter != null) { + fileSizeDsMap + .computeIfAbsent(sizeFilter, (k) -> new HashSet<>()) + .add(af.getDataSourceObjectId()); } if (fileExtensionDsMap.isEmpty() && mimeTypeDsMap.isEmpty() && fileSizeDsMap.isEmpty()) { diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultTypeFactory.java index d1f945148c..fc6477b976 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultTypeFactory.java @@ -24,6 
+24,7 @@ import com.google.common.collect.ImmutableSet; import java.beans.PropertyChangeEvent; import java.util.Set; import java.util.concurrent.ExecutionException; +import org.apache.commons.lang3.StringUtils; import org.openide.nodes.ChildFactory; import org.openide.nodes.Children; import org.openide.util.NbBundle.Messages; @@ -33,15 +34,14 @@ import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent; import org.sleuthkit.autopsy.datamodel.utils.IconsUtil; import org.sleuthkit.autopsy.ingest.IngestManager; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; -import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultDAO; import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSetSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.KeywordHitSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.MainDAO; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; import static org.sleuthkit.autopsy.mainui.nodes.TreeNode.getDefaultLookup; import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardArtifact.Category; /** * Factory for displaying analysis result types in the tree. 
@@ -93,72 +93,16 @@ public class AnalysisResultTypeFactory extends TreeChildFactory getInvalidatedChild(TreeEvent daoEvt) { + // GVDTODO + return null; + } + + @Override + public int compare(AnalysisResultSearchParam o1, AnalysisResultSearchParam o2) { + return o1.getArtifactType().getDisplayName().compareTo(o2.getArtifactType().getDisplayName()); } /** @@ -232,15 +176,21 @@ public class AnalysisResultTypeFactory extends TreeChildFactory createNewNode(TreeResultsDTO.TreeItemDTO rowData) { return new TreeSetTypeNode(rowData); } + + @Override + protected TreeResultsDTO.TreeItemDTO getInvalidatedChild(TreeEvent daoEvt) { + // GVDTODO + return null; + } + + @Override + public int compare(AnalysisResultSetSearchParam o1, AnalysisResultSetSearchParam o2) { + return StringUtils.compare(o1.getSetName(), o2.getSetName(), true); + } } /** @@ -328,10 +278,18 @@ public class AnalysisResultTypeFactory extends TreeChildFactory getInvalidatedChild(TreeEvent daoEvt) { + // GVDTODO + return null; + } + + @Override + public int compare(KeywordSearchTermParams o1, KeywordSearchTermParams o2) { + return StringUtils.compare(o1.getSearchTerm(), o2.getSearchTerm(), true); + } + + } @@ -402,10 +360,16 @@ public class AnalysisResultTypeFactory extends TreeChildFactory getInvalidatedChild(TreeEvent daoEvt) { + // GVDTODO + return null; + } + + @Override + public int compare(KeywordMatchParams o1, KeywordMatchParams o2) { + return StringUtils.compare(o1.getKeywordMatch(), o2.getKeywordMatch(), true); + } } /** diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java index 671e3c4d13..d92a15d99c 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java @@ -25,7 +25,6 @@ import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactSearchParam; import 
org.sleuthkit.autopsy.mainui.datamodel.MainDAO; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; -import org.sleuthkit.autopsy.mainui.datamodel.events.DataArtifactEvent; import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; import org.sleuthkit.datamodel.BlackboardArtifact; diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java index 65a3e873e5..bbb0149bf5 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java @@ -23,6 +23,7 @@ import java.util.Collection; import java.util.concurrent.ExecutionException; import java.util.stream.Collectors; import java.util.stream.Stream; +import org.apache.commons.lang3.StringUtils; import org.openide.nodes.Children; import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent; import org.sleuthkit.autopsy.ingest.ModuleContentEvent; @@ -33,9 +34,9 @@ import org.sleuthkit.autopsy.mainui.datamodel.FileExtSearchFilter; import org.sleuthkit.autopsy.mainui.datamodel.FileTypeExtensionsSearchParams; import org.sleuthkit.autopsy.mainui.datamodel.FileTypeMimeSearchParams; import org.sleuthkit.autopsy.mainui.datamodel.FileTypeSizeSearchParams; -import org.sleuthkit.autopsy.mainui.datamodel.FileSizeFilter; import org.sleuthkit.autopsy.mainui.datamodel.MainDAO; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; import org.sleuthkit.datamodel.AbstractFile; /** @@ -99,22 +100,17 @@ public class ViewsTypeFactory { return MainDAO.getInstance().getViewsDAO().getFileSizeCounts(this.dataSourceId); } -// @Override -// public boolean isRefreshRequired(PropertyChangeEvent evt) { -// AbstractFile evtFile = getFileInDataSourceFromEvt(evt, this.dataSourceId); -// if (evtFile == null) { -// return 
false; -// } -// -// long size = evtFile.getSize(); -// for (FileSizeFilter filter : FileSizeFilter.values()) { -// if (size >= filter.getMinBound() || size < filter.getMaxBound()) { -// return true; -// } -// } -// -// return false; -// } + @Override + protected TreeResultsDTO.TreeItemDTO getInvalidatedChild(TreeEvent daoEvt) { + // GVDTODO + return null; + } + + @Override + public int compare(FileTypeSizeSearchParams o1, FileTypeSizeSearchParams o2) { + return Integer.compare(o1.getSizeFilter().getId(), o2.getSizeFilter().getId()); + } + /** * Shows a file size tree node. @@ -164,10 +160,17 @@ public class ViewsTypeFactory { return MainDAO.getInstance().getViewsDAO().getFileMimeCounts(null, this.dataSourceId); } -// @Override -// public boolean isRefreshRequired(PropertyChangeEvent evt) { -// return getFileInDataSourceFromEvt(evt, this.dataSourceId) != null; -// } + @Override + protected TreeResultsDTO.TreeItemDTO getInvalidatedChild(TreeEvent daoEvt) { + // GVDTODO + return null; + } + + @Override + public int compare(FileTypeMimeSearchParams o1, FileTypeMimeSearchParams o2) { + return StringUtils.compare(o1.getMimeType(), o2.getMimeType(), true); + } + static class FileMimePrefixNode extends TreeNode { @@ -218,15 +221,18 @@ public class ViewsTypeFactory { return MainDAO.getInstance().getViewsDAO().getFileMimeCounts(this.mimeTypePrefix, this.dataSourceId); } -// @Override -// public boolean isRefreshRequired(PropertyChangeEvent evt) { -// AbstractFile file = getFileInDataSourceFromEvt(evt, dataSourceId); -// if (file == null || file.getMIMEType() == null) { -// return false; -// } -// -// return file.getMIMEType().toLowerCase().startsWith(this.mimeTypePrefix.toLowerCase()); -// } + @Override + protected TreeResultsDTO.TreeItemDTO getInvalidatedChild(TreeEvent daoEvt) { + // GVDTODO + return null; + } + + @Override + public int compare(FileTypeMimeSearchParams o1, FileTypeMimeSearchParams o2) { + return StringUtils.compare(o1.getMimeType(), o2.getMimeType(), 
true); + } + + /** * Displays an individual suffix node in the tree (i.e. 'aac' underneath @@ -302,12 +308,18 @@ public class ViewsTypeFactory { return MainDAO.getInstance().getViewsDAO().getFileExtCounts(this.childFilters, this.dataSourceId); } -// @Override -// public boolean isRefreshRequired(PropertyChangeEvent evt) { -// AbstractFile file = getFileInDataSourceFromEvt(evt, this.dataSourceId); -// return file != null && file.getNameExtension() != null && -// this.childFilters.stream().anyMatch((filter) -> filter.getFilter().contains("." + file.getNameExtension().toLowerCase())); -// } + @Override + protected TreeResultsDTO.TreeItemDTO getInvalidatedChild(TreeEvent daoEvt) { + //GVDTODO + return null; + } + + @Override + public int compare(FileTypeExtensionsSearchParams o1, FileTypeExtensionsSearchParams o2) { + return StringUtils.compare(o1.getFilter().getDisplayName(), o2.getFilter().getDisplayName()); + } + + /** * Represents a file extension tree node that may or may not have child From e13396e8030be1713dc4122b7345f6fe5b9d866f Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Tue, 23 Nov 2021 20:02:11 -0500 Subject: [PATCH 080/142] show indeterminate when fetching counts --- .../mainui/datamodel/DataArtifactDAO.java | 35 ++++++++++++------- .../datamodel/events/TreeEventTimedCache.java | 6 ++++ 2 files changed, 28 insertions(+), 13 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index 9545b5576b..de32ed9e76 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -70,7 +70,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { return instance; } - + /** * @return The set of types that are not shown in the tree. 
*/ @@ -78,8 +78,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { return BlackboardArtifactDAO.getIgnoredTreeTypes(); } - - private final TreeEventTimedCache treeCache = new TreeEventTimedCache<>(); + private final TreeEventTimedCache treeCache = new TreeEventTimedCache<>(); private final Cache, DataArtifactTableSearchResultsDTO> dataArtifactCache = CacheBuilder.newBuilder().maximumSize(1000).build(); private DataArtifactTableSearchResultsDTO fetchDataArtifactsForTable(SearchParams cacheKey) throws NoCurrentCaseException, TskCoreException { @@ -151,9 +150,19 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { public TreeResultsDTO getDataArtifactCounts(Long dataSourceId) throws ExecutionException { try { // get row dto's sorted by display name + Set indeterminateTypes = this.treeCache.getEnqueued().stream() + .filter(evt -> dataSourceId == null || evt.getDataSourceId() == dataSourceId) + .map(evt -> evt.getArtifactType()) + .collect(Collectors.toSet()); + Map typeCounts = getCounts(BlackboardArtifact.Category.DATA_ARTIFACT, dataSourceId); List> treeItemRows = typeCounts.entrySet().stream() - .map(entry -> getTreeItem(entry.getKey(), dataSourceId, TreeDisplayCount.getDeterminate(entry.getValue()))) + .map(entry -> { + return getTreeItem(entry.getKey(), dataSourceId, + indeterminateTypes.contains(entry.getKey()) + ? 
TreeDisplayCount.INDETERMINATE + : TreeDisplayCount.getDeterminate(entry.getValue())); + }) .sorted(Comparator.comparing(countRow -> countRow.getDisplayName())) .collect(Collectors.toList()); @@ -178,7 +187,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { if (evt == null) { return Collections.emptyList(); } - + Map> artifactTypeDataSourceMap = dataEvt.getArtifacts().stream() .map((art) -> { try { @@ -219,23 +228,23 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { dataArtifactEvents.add(newEvt); } } - + List newTreeEvents = this.treeCache.enqueueAll(dataArtifactEvents).stream() .map(daoEvt -> new TreeEvent(getTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.INDETERMINATE), false)) .collect(Collectors.toList()); - + return Stream.of(dataArtifactEvents, newTreeEvents) .flatMap((lst) -> lst.stream()) .collect(Collectors.toList()); } - + private TreeItemDTO getTreeItem(BlackboardArtifact.Type artifactType, long dataSourceId, TreeDisplayCount displayCount) { return new TreeResultsDTO.TreeItemDTO<>( - BlackboardArtifact.Category.DATA_ARTIFACT.name(), - new DataArtifactSearchParam(artifactType, dataSourceId), - artifactType.getTypeID(), - artifactType.getDisplayName(), - displayCount); + BlackboardArtifact.Category.DATA_ARTIFACT.name(), + new DataArtifactSearchParam(artifactType, dataSourceId), + artifactType.getTypeID(), + artifactType.getDisplayName(), + displayCount); } @Override diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEventTimedCache.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEventTimedCache.java index 50e996232b..94aa1ed64f 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEventTimedCache.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEventTimedCache.java @@ -21,8 +21,10 @@ package org.sleuthkit.autopsy.mainui.datamodel.events; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; 
+import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; public class TreeEventTimedCache { private static final long DEFAULT_TIMEOUT_MILLIS = 2 * 60 * 1000; @@ -65,6 +67,10 @@ public class TreeEventTimedCache { return updateToIndeterminate; } + + public Set getEnqueued() { + return new HashSet<>(eventTimeouts.keySet()); + } public Collection getEventTimeouts() { long curTime = getCurTime(); From 71e8c2edface29c7d14c79f344831d2dd8696403 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Tue, 23 Nov 2021 20:31:22 -0500 Subject: [PATCH 081/142] bug fixes --- .../mainui/datamodel/AnalysisResultDAO.java | 2 +- .../mainui/datamodel/DataArtifactDAO.java | 2 +- .../autopsy/mainui/datamodel/ViewsDAO.java | 1 + .../nodes/AnalysisResultTypeFactory.java | 25 +++++------ .../mainui/nodes/DataArtifactTypeFactory.java | 1 + .../autopsy/mainui/nodes/TreeNode.java | 4 +- .../mainui/nodes/ViewsTypeFactory.java | 43 +++---------------- 7 files changed, 21 insertions(+), 57 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java index 677940ad81..fa5be343f9 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java @@ -475,7 +475,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } private TreeItemDTO getSetTreeItem(BlackboardArtifact.Type type, - long dataSourceId, String setName, String displayName, TreeDisplayCount displayCount) { + Long dataSourceId, String setName, String displayName, TreeDisplayCount displayCount) { return new TreeItemDTO<>( type.getTypeName(), diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index de32ed9e76..c4fe361484 100644 --- 
a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -238,7 +238,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { .collect(Collectors.toList()); } - private TreeItemDTO getTreeItem(BlackboardArtifact.Type artifactType, long dataSourceId, TreeDisplayCount displayCount) { + private TreeItemDTO getTreeItem(BlackboardArtifact.Type artifactType, Long dataSourceId, TreeDisplayCount displayCount) { return new TreeResultsDTO.TreeItemDTO<>( BlackboardArtifact.Category.DATA_ARTIFACT.name(), new DataArtifactSearchParam(artifactType, dataSourceId), diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java index a18e229ade..547f389456 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java @@ -668,6 +668,7 @@ public class ViewsDAO extends AbstractDAO { @Override List processEvent(PropertyChangeEvent evt) { + // GVDTODO maps may not be necessary now that this isn't processing a list of events. 
Map> fileExtensionDsMap = new HashMap<>(); Map>> mimeTypeDsMap = new HashMap<>(); Map> fileSizeDsMap = new HashMap<>(); diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultTypeFactory.java index fc6477b976..3b58de71d7 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultTypeFactory.java @@ -21,19 +21,14 @@ package org.sleuthkit.autopsy.mainui.nodes; import org.sleuthkit.autopsy.mainui.datamodel.KeywordSearchTermParams; import org.sleuthkit.autopsy.mainui.datamodel.KeywordMatchParams; import com.google.common.collect.ImmutableSet; -import java.beans.PropertyChangeEvent; +import java.util.Comparator; import java.util.Set; import java.util.concurrent.ExecutionException; -import org.apache.commons.lang3.StringUtils; import org.openide.nodes.ChildFactory; import org.openide.nodes.Children; import org.openide.util.NbBundle.Messages; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent; import org.sleuthkit.autopsy.datamodel.utils.IconsUtil; -import org.sleuthkit.autopsy.ingest.IngestManager; -import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSetSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.KeywordHitSearchParam; @@ -48,6 +43,9 @@ import org.sleuthkit.datamodel.BlackboardArtifact; */ public class AnalysisResultTypeFactory extends TreeChildFactory { + private final static Comparator STRING_COMPARATOR = Comparator.nullsFirst(Comparator.naturalOrder()); + + @SuppressWarnings("deprecation") private static Set SET_TREE_ARTIFACTS = ImmutableSet.of( BlackboardArtifact.Type.TSK_HASHSET_HIT.getTypeID(), 
BlackboardArtifact.Type.TSK_INTERESTING_ARTIFACT_HIT.getTypeID(), @@ -93,7 +91,6 @@ public class AnalysisResultTypeFactory extends TreeChildFactory getInvalidatedChild(TreeEvent daoEvt) { // GVDTODO @@ -189,7 +186,7 @@ public class AnalysisResultTypeFactory extends TreeChildFactory itemData) { super(itemData.getTypeData().getArtifactType().getTypeName(), @@ -240,7 +237,7 @@ public class AnalysisResultTypeFactory extends TreeChildFactory itemData) { super(itemData.getTypeData().getArtifactType().getTypeName(), @@ -248,7 +245,7 @@ public class AnalysisResultTypeFactory extends TreeChildFactory getInvalidatedChild(TreeEvent daoEvt) { if (daoEvt.getItemRecord().getTypeData() instanceof DataArtifactSearchParam) { + @SuppressWarnings("unchecked") TreeItemDTO originalTreeItem = (TreeItemDTO) daoEvt.getItemRecord(); DataArtifactSearchParam searchParam = originalTreeItem.getTypeData(); if (this.dataSourceId == null || this.dataSourceId == searchParam.getDataSourceId()) { diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeNode.java index 782e9af691..70eb72ae08 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeNode.java @@ -20,8 +20,8 @@ package org.sleuthkit.autopsy.mainui.nodes; import org.sleuthkit.autopsy.corecomponents.SelectionResponder; import java.text.MessageFormat; +import java.util.Objects; import java.util.logging.Level; -import org.bouncycastle.util.Objects; import org.openide.nodes.AbstractNode; import org.openide.nodes.Children; import org.openide.util.Lookup; @@ -101,7 +101,7 @@ public abstract class TreeNode extends AbstractNode implements SelectionRespo // update display name only if there is a change. 
if (prevData == null || !prevData.getDisplayName().equals(curData.getDisplayName()) - || !Objects.areEqual(prevData.getDisplayCount(), curData.getDisplayCount())) { + || !Objects.equals(prevData.getDisplayCount(), curData.getDisplayCount())) { String displayName = curData.getDisplayCount() == null ? curData.getDisplayName() : curData.getDisplayName() + curData.getDisplayCount().getDisplaySuffix(); diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java index bbb0149bf5..21cc369db5 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java @@ -20,10 +20,10 @@ package org.sleuthkit.autopsy.mainui.nodes; import java.beans.PropertyChangeEvent; import java.util.Collection; +import java.util.Comparator; import java.util.concurrent.ExecutionException; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.apache.commons.lang3.StringUtils; import org.openide.nodes.Children; import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent; import org.sleuthkit.autopsy.ingest.ModuleContentEvent; @@ -45,34 +45,7 @@ import org.sleuthkit.datamodel.AbstractFile; */ public class ViewsTypeFactory { - /** - * Returns an AbstractFile if the event contains a ModuleContentEvent which - * contains an abstract file and that file belongs to the data source if a - * data source id is specified. Otherwise, returns null. - * - * @param evt The event - * @param dataSourceId The data source object id that will be the parent of - * the file or null. - * - * @return The file meeting criteria or null. 
- */ - private static AbstractFile getFileInDataSourceFromEvt(PropertyChangeEvent evt, Long dataSourceId) { - if (!(evt.getOldValue() instanceof ModuleContentEvent)) { - return null; - } - - ModuleContentEvent contentEvt = (ModuleContentEvent) evt.getOldValue(); - if (!(contentEvt.getSource() instanceof AbstractFile)) { - return null; - } - - AbstractFile file = (AbstractFile) contentEvt.getSource(); - if (dataSourceId != null && file.getDataSourceObjectId() != dataSourceId) { - return null; - } - - return file; - } + private static final Comparator STRING_COMPARATOR = Comparator.nullsFirst(Comparator.naturalOrder()); /** * The factory for creating file size tree nodes. @@ -111,7 +84,6 @@ public class ViewsTypeFactory { return Integer.compare(o1.getSizeFilter().getId(), o2.getSizeFilter().getId()); } - /** * Shows a file size tree node. */ @@ -168,10 +140,9 @@ public class ViewsTypeFactory { @Override public int compare(FileTypeMimeSearchParams o1, FileTypeMimeSearchParams o2) { - return StringUtils.compare(o1.getMimeType(), o2.getMimeType(), true); + return STRING_COMPARATOR.compare(o1.getMimeType(), o2.getMimeType()); } - static class FileMimePrefixNode extends TreeNode { /** @@ -229,11 +200,9 @@ public class ViewsTypeFactory { @Override public int compare(FileTypeMimeSearchParams o1, FileTypeMimeSearchParams o2) { - return StringUtils.compare(o1.getMimeType(), o2.getMimeType(), true); + return STRING_COMPARATOR.compare(o1.getMimeType(), o2.getMimeType()); } - - /** * Displays an individual suffix node in the tree (i.e. 'aac' underneath * 'audio'). 
@@ -316,11 +285,9 @@ public class ViewsTypeFactory { @Override public int compare(FileTypeExtensionsSearchParams o1, FileTypeExtensionsSearchParams o2) { - return StringUtils.compare(o1.getFilter().getDisplayName(), o2.getFilter().getDisplayName()); + return STRING_COMPARATOR.compare(o1.getFilter().getDisplayName(), o2.getFilter().getDisplayName()); } - - /** * Represents a file extension tree node that may or may not have child * filters. From 707a9f640b8b8f75ed7e09bb9315aecd2262fb57 Mon Sep 17 00:00:00 2001 From: Eugene Livis Date: Wed, 24 Nov 2021 10:04:24 -0500 Subject: [PATCH 082/142] Revered thumb nail sub paging --- .../autopsy/corecomponents/Bundle.properties | 8 + .../corecomponents/Bundle.properties-MERGED | 14 +- .../DataResultViewerThumbnail.form | 194 +++++++++ .../DataResultViewerThumbnail.java | 412 ++++++++++++++++-- .../corecomponents/ThumbnailViewChildren.java | 168 ++++--- 5 files changed, 707 insertions(+), 89 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties b/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties index 5ee772af47..5b391d9d75 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties @@ -247,3 +247,11 @@ DataResultPanel.pagesLabel.text=Pages: DataResultPanel.pageNumLabel.text= DataResultPanel.pageNextButton.text= DataResultPanel.pagePrevButton.text= + +DataResultViewerThumbnail.pageLabel.text=Page: +DataResultViewerThumbnail.pagesLabel.text=Pages: +DataResultViewerThumbnail.pagePrevButton.text= +DataResultViewerThumbnail.pageNextButton.text= +DataResultViewerThumbnail.pageNumLabel.text=- +DataResultViewerThumbnail.goToPageLabel.text=Go to Page: +DataResultViewerThumbnail.goToPageField.text= diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties-MERGED index 07a42f5e19..cde18d3900 100755 --- 
a/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/Bundle.properties-MERGED @@ -72,9 +72,9 @@ DataContentViewerHex.totalPageLabel.text_1=100 DataContentViewerHex.pageLabel2.text=Page # Product Information panel -LBL_Description=
\n Product Version: {0} ({9})
Sleuth Kit Version: {7}
Netbeans RCP Build: {8}
Java: {1}; {2}
System: {3}; {4}; {5}
Userdir: {6}
+LBL_Description=
\n Product Version: {0} ({9})
Sleuth Kit Version: {7}
Netbeans RCP Build: {8}
Java: {1}; {2}
System: {3}; {4}; {5}
Userdir: {6}
Format_OperatingSystem_Value={0} version {1} running on {2} -LBL_Copyright=
Autopsy™ is a digital forensics platform based on The Sleuth Kit™ and other tools.
Copyright © 2003-2020.
+LBL_Copyright=
Autopsy™ is a digital forensics platform based on The Sleuth Kit™ and other tools.
Copyright © 2003-2020.
SortChooser.dialogTitle=Choose Sort Criteria ThumbnailViewChildren.progress.cancelling=(Cancelling) # {0} - file name @@ -97,7 +97,7 @@ DataContentViewerHex.goToPageTextField.text= DataContentViewerHex.goToPageLabel.text=Go to Page: DataResultViewerThumbnail.imagesLabel.text=Images: DataResultViewerThumbnail.imagesRangeLabel.text=- -DataResultViewerThumbnail.filePathLabel.text=\ \ \ +DataResultViewerThumbnail.filePathLabel.text=\ AdvancedConfigurationDialog.cancelButton.text=Cancel DataArtifactContentViewer.waitText=Retrieving and preparing data, please wait... DataArtifactContentViewer.errorText=Error retrieving result @@ -311,3 +311,11 @@ DataResultPanel.pagesLabel.text=Pages: DataResultPanel.pageNumLabel.text= DataResultPanel.pageNextButton.text= DataResultPanel.pagePrevButton.text= + +DataResultViewerThumbnail.pageLabel.text=Page: +DataResultViewerThumbnail.pagesLabel.text=Pages: +DataResultViewerThumbnail.pagePrevButton.text= +DataResultViewerThumbnail.pageNextButton.text= +DataResultViewerThumbnail.pageNumLabel.text=- +DataResultViewerThumbnail.goToPageLabel.text=Go to Page: +DataResultViewerThumbnail.goToPageField.text= diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerThumbnail.form b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerThumbnail.form index c8bd1c3d78..798b0b51bd 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerThumbnail.form +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerThumbnail.form @@ -27,6 +27,200 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git 
a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerThumbnail.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerThumbnail.java index 6c5a23ad67..c2a533bc2b 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerThumbnail.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerThumbnail.java @@ -21,18 +21,20 @@ package org.sleuthkit.autopsy.corecomponents; import java.awt.Color; import java.awt.Cursor; import java.awt.Dialog; +import java.awt.EventQueue; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.util.List; import java.util.Map; +import java.util.concurrent.ExecutionException; import java.util.function.Function; import java.util.logging.Level; -import java.util.prefs.PreferenceChangeListener; import java.util.prefs.Preferences; import java.util.stream.Collectors; +import javax.swing.JOptionPane; import javax.swing.ListSelectionModel; import javax.swing.SortOrder; -import javax.swing.SwingUtilities; +import javax.swing.SwingWorker; import org.apache.commons.lang3.StringUtils; import org.netbeans.api.progress.ProgressHandle; import org.openide.DialogDescriptor; @@ -42,10 +44,13 @@ import org.openide.explorer.ExplorerManager; import org.openide.nodes.AbstractNode; import org.openide.nodes.Children; import org.openide.nodes.Node; +import org.openide.nodes.NodeEvent; +import org.openide.nodes.NodeListener; +import org.openide.nodes.NodeMemberEvent; +import org.openide.nodes.NodeReorderEvent; import org.openide.util.NbBundle; import org.openide.util.NbPreferences; import org.openide.util.lookup.ServiceProvider; -import org.sleuthkit.autopsy.core.UserPreferences; import org.sleuthkit.autopsy.corecomponentinterfaces.DataResultViewer; import static org.sleuthkit.autopsy.corecomponents.Bundle.*; import org.sleuthkit.autopsy.corecomponents.ResultViewerPersistence.SortCriterion; @@ -73,9 +78,13 @@ public final class DataResultViewerThumbnail extends 
AbstractDataResultViewer { private static final long serialVersionUID = 1L; private static final Logger logger = Logger.getLogger(DataResultViewerThumbnail.class.getName()); + private final PageUpdater pageUpdater = new PageUpdater(); private TableFilterNode rootNode; private ThumbnailViewChildren rootNodeChildren; private NodeSelectionListener selectionListener; + private int currentPage; + private int totalPages; + private int currentPageImages; private int thumbSize = ImageUtils.ICON_SIZE_MEDIUM; /** @@ -112,7 +121,10 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { Bundle.DataResultViewerThumbnail_thumbnailSizeComboBox_medium(), Bundle.DataResultViewerThumbnail_thumbnailSizeComboBox_large()})); thumbnailSizeComboBox.setSelectedIndex(1); - + currentPage = -1; + totalPages = 0; + currentPageImages = 0; + // The GUI builder is using FlowLayout therefore this change so have no // impact on the initally designed layout. This change will just effect // how the components are laid out as size of the window changes. 
@@ -130,6 +142,20 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { java.awt.GridBagConstraints gridBagConstraints; buttonBarPanel = new javax.swing.JPanel(); + pagesPanel = new javax.swing.JPanel(); + pageNumberPane = new javax.swing.JPanel(); + pageLabel = new javax.swing.JLabel(); + pageNumLabel = new javax.swing.JLabel(); + pageButtonPanel = new javax.swing.JPanel(); + pagesLabel = new javax.swing.JLabel(); + pagePrevButton = new javax.swing.JButton(); + pageNextButton = new javax.swing.JButton(); + pageGotoPane = new javax.swing.JPanel(); + goToPageLabel = new javax.swing.JLabel(); + goToPageField = new javax.swing.JTextField(); + imagePane = new javax.swing.JPanel(); + imagesLabel = new javax.swing.JLabel(); + imagesRangeLabel = new javax.swing.JLabel(); thumbnailSizeComboBox = new javax.swing.JComboBox<>(); sortPane = new javax.swing.JPanel(); sortLabel = new javax.swing.JLabel(); @@ -141,6 +167,140 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { buttonBarPanel.setLayout(new java.awt.FlowLayout(java.awt.FlowLayout.LEFT)); + pagesPanel.setLayout(new java.awt.GridBagLayout()); + + pageNumberPane.setLayout(new java.awt.GridBagLayout()); + + pageLabel.setText(org.openide.util.NbBundle.getMessage(DataResultViewerThumbnail.class, "DataResultViewerThumbnail.pageLabel.text")); // NOI18N + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 0; + gridBagConstraints.fill = java.awt.GridBagConstraints.VERTICAL; + gridBagConstraints.anchor = java.awt.GridBagConstraints.FIRST_LINE_START; + gridBagConstraints.weighty = 1.0; + gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 9); + pageNumberPane.add(pageLabel, gridBagConstraints); + + pageNumLabel.setText(org.openide.util.NbBundle.getMessage(DataResultViewerThumbnail.class, "DataResultViewerThumbnail.pageNumLabel.text")); // NOI18N + gridBagConstraints = new 
java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 1; + gridBagConstraints.gridy = 0; + gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH; + gridBagConstraints.anchor = java.awt.GridBagConstraints.FIRST_LINE_START; + gridBagConstraints.weightx = 1.0; + gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 15); + pageNumberPane.add(pageNumLabel, gridBagConstraints); + + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.fill = java.awt.GridBagConstraints.VERTICAL; + gridBagConstraints.anchor = java.awt.GridBagConstraints.FIRST_LINE_START; + pagesPanel.add(pageNumberPane, gridBagConstraints); + + buttonBarPanel.add(pagesPanel); + + pageButtonPanel.setLayout(new java.awt.GridBagLayout()); + + pagesLabel.setText(org.openide.util.NbBundle.getMessage(DataResultViewerThumbnail.class, "DataResultViewerThumbnail.pagesLabel.text")); // NOI18N + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 0; + gridBagConstraints.fill = java.awt.GridBagConstraints.VERTICAL; + gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST; + gridBagConstraints.weighty = 1.0; + gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 9); + pageButtonPanel.add(pagesLabel, gridBagConstraints); + + pagePrevButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/corecomponents/btn_step_back.png"))); // NOI18N + pagePrevButton.setText(org.openide.util.NbBundle.getMessage(DataResultViewerThumbnail.class, "DataResultViewerThumbnail.pagePrevButton.text")); // NOI18N + pagePrevButton.setBorder(null); + pagePrevButton.setDisabledIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/corecomponents/btn_step_back_disabled.png"))); // NOI18N + pagePrevButton.setFocusable(false); + pagePrevButton.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER); + pagePrevButton.setMargin(new java.awt.Insets(0, 0, 0, 0)); + 
pagePrevButton.setRolloverIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/corecomponents/btn_step_back_hover.png"))); // NOI18N + pagePrevButton.setVerticalTextPosition(javax.swing.SwingConstants.BOTTOM); + pagePrevButton.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + pagePrevButtonActionPerformed(evt); + } + }); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 1; + gridBagConstraints.gridy = 0; + gridBagConstraints.gridheight = 2; + gridBagConstraints.anchor = java.awt.GridBagConstraints.FIRST_LINE_START; + pageButtonPanel.add(pagePrevButton, gridBagConstraints); + + pageNextButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/corecomponents/btn_step_forward.png"))); // NOI18N + pageNextButton.setText(org.openide.util.NbBundle.getMessage(DataResultViewerThumbnail.class, "DataResultViewerThumbnail.pageNextButton.text")); // NOI18N + pageNextButton.setBorder(null); + pageNextButton.setDisabledIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/corecomponents/btn_step_forward_disabled.png"))); // NOI18N + pageNextButton.setFocusable(false); + pageNextButton.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER); + pageNextButton.setMargin(new java.awt.Insets(0, 0, 0, 0)); + pageNextButton.setMaximumSize(new java.awt.Dimension(27, 23)); + pageNextButton.setMinimumSize(new java.awt.Dimension(27, 23)); + pageNextButton.setRolloverIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/corecomponents/btn_step_forward_hover.png"))); // NOI18N + pageNextButton.setVerticalTextPosition(javax.swing.SwingConstants.BOTTOM); + pageNextButton.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + pageNextButtonActionPerformed(evt); + } + }); + gridBagConstraints = new 
java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 2; + gridBagConstraints.gridy = 0; + gridBagConstraints.gridheight = 2; + gridBagConstraints.anchor = java.awt.GridBagConstraints.FIRST_LINE_START; + gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 15); + pageButtonPanel.add(pageNextButton, gridBagConstraints); + + buttonBarPanel.add(pageButtonPanel); + + pageGotoPane.setLayout(new java.awt.GridBagLayout()); + + goToPageLabel.setText(org.openide.util.NbBundle.getMessage(DataResultViewerThumbnail.class, "DataResultViewerThumbnail.goToPageLabel.text")); // NOI18N + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 0; + gridBagConstraints.gridy = 0; + gridBagConstraints.fill = java.awt.GridBagConstraints.VERTICAL; + gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST; + gridBagConstraints.weighty = 1.0; + gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 9); + pageGotoPane.add(goToPageLabel, gridBagConstraints); + + goToPageField.setText(org.openide.util.NbBundle.getMessage(DataResultViewerThumbnail.class, "DataResultViewerThumbnail.goToPageField.text")); // NOI18N + goToPageField.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + goToPageFieldActionPerformed(evt); + } + }); + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.gridx = 1; + gridBagConstraints.gridy = 0; + gridBagConstraints.gridheight = 2; + gridBagConstraints.ipadx = 75; + gridBagConstraints.anchor = java.awt.GridBagConstraints.FIRST_LINE_START; + gridBagConstraints.weightx = 1.0; + gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 15); + pageGotoPane.add(goToPageField, gridBagConstraints); + + buttonBarPanel.add(pageGotoPane); + + imagePane.setLayout(new java.awt.GridBagLayout()); + + imagesLabel.setText(org.openide.util.NbBundle.getMessage(DataResultViewerThumbnail.class, "DataResultViewerThumbnail.imagesLabel.text")); // NOI18N 
+ gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 9); + imagePane.add(imagesLabel, gridBagConstraints); + + imagesRangeLabel.setText(org.openide.util.NbBundle.getMessage(DataResultViewerThumbnail.class, "DataResultViewerThumbnail.imagesRangeLabel.text")); // NOI18N + gridBagConstraints = new java.awt.GridBagConstraints(); + gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 15); + imagePane.add(imagesRangeLabel, gridBagConstraints); + + buttonBarPanel.add(imagePane); + thumbnailSizeComboBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { thumbnailSizeComboBoxActionPerformed(evt); @@ -181,6 +341,18 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { add(iconView, java.awt.BorderLayout.CENTER); }// //GEN-END:initComponents + private void pagePrevButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_pagePrevButtonActionPerformed + previousPage(); + }//GEN-LAST:event_pagePrevButtonActionPerformed + + private void pageNextButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_pageNextButtonActionPerformed + nextPage(); + }//GEN-LAST:event_pageNextButtonActionPerformed + + private void goToPageFieldActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_goToPageFieldActionPerformed + goToPage(goToPageField.getText()); + }//GEN-LAST:event_goToPageFieldActionPerformed + private void thumbnailSizeComboBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_thumbnailSizeComboBoxActionPerformed int newIconSize; switch (thumbnailSizeComboBox.getSelectedIndex()) { @@ -199,14 +371,14 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { if (thumbSize != newIconSize) { thumbSize = newIconSize; Node root = this.getExplorerManager().getRootContext(); - this.rootNodeChildren.setThumbsSize(thumbSize); + 
((ThumbnailViewChildren) root.getChildren()).setThumbsSize(thumbSize); // Temporarily set the explored context to the root, instead of a child node. // This is a workaround hack to convince org.openide.explorer.ExplorerManager to // update even though the new and old Node values are identical. This in turn // will cause the entire view to update completely. After this we // immediately set the node back to the current child by calling switchPage(). - this.getExplorerManager().setExploredContext(this.rootNode); + this.getExplorerManager().setExploredContext(root); switchPage(); } }//GEN-LAST:event_thumbnailSizeComboBoxActionPerformed @@ -253,7 +425,21 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JPanel buttonBarPanel; private javax.swing.JLabel filePathLabel; + private javax.swing.JTextField goToPageField; + private javax.swing.JLabel goToPageLabel; private org.openide.explorer.view.IconView iconView; + private javax.swing.JPanel imagePane; + private javax.swing.JLabel imagesLabel; + private javax.swing.JLabel imagesRangeLabel; + private javax.swing.JPanel pageButtonPanel; + private javax.swing.JPanel pageGotoPane; + private javax.swing.JLabel pageLabel; + private javax.swing.JButton pageNextButton; + private javax.swing.JLabel pageNumLabel; + private javax.swing.JPanel pageNumberPane; + private javax.swing.JButton pagePrevButton; + private javax.swing.JLabel pagesLabel; + private javax.swing.JPanel pagesPanel; private javax.swing.JButton sortButton; private javax.swing.JLabel sortLabel; private javax.swing.JPanel sortPane; @@ -265,7 +451,7 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { return (selectedNode != null); } - @Override + @Override public void setNode(Node givenNode) { setNode(givenNode, null); } @@ -273,7 +459,7 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { 
@Override public void setNode(Node givenNode, SearchResultsDTO searchResults) { // GVDTODO givenNode cannot be assumed to be a table filter node and search results needs to be captured. - + setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); if (selectionListener == null) { this.getExplorerManager().addPropertyChangeListener(new NodeSelectionListener()); @@ -289,19 +475,23 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { // case where the DataResultViewerThumbnail stands along from the // DataResultViewer. See DataResultViewer setNode for more information. if (givenNode != null && givenNode.getChildren().getNodesCount() > 0) { - + // GVDTODO this should be handled more elegantly - rootNode = (givenNode instanceof TableFilterNode) - ? (TableFilterNode) givenNode + rootNode = (givenNode instanceof TableFilterNode) + ? (TableFilterNode) givenNode : new TableFilterNode(givenNode, true); - + + /* * Wrap the given node in a ThumbnailViewChildren that will * produce ThumbnailPageNodes with ThumbnailViewNode children * from the child nodes of the given node. 
*/ - rootNodeChildren = new ThumbnailViewChildren(rootNode, thumbSize); - final Node root = new AbstractNode(Children.create(rootNodeChildren, true)); + rootNodeChildren = new ThumbnailViewChildren(givenNode, thumbSize); + final Node root = new AbstractNode(rootNodeChildren); + + pageUpdater.setRoot(root); + root.addNodeListener(pageUpdater); this.getExplorerManager().setRootContext(root); } else { rootNode = null; @@ -328,7 +518,9 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { @Override public void resetComponent() { super.resetComponent(); - setNode(null); + this.totalPages = 0; + this.currentPage = -1; + currentPageImages = 0; updateControls(); } @@ -339,41 +531,117 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { super.clearComponent(); } + private void nextPage() { + if (currentPage < totalPages) { + currentPage++; + switchPage(); + } + } + + private void previousPage() { + if (currentPage > 1) { + currentPage--; + switchPage(); + } + } + + private void goToPage(String pageNumText) { + int newPage; + try { + newPage = Integer.parseInt(pageNumText); + } catch (NumberFormatException e) { + //ignore input + return; + } + + if (newPage > totalPages || newPage < 1) { + JOptionPane.showMessageDialog(this, + NbBundle.getMessage(this.getClass(), "DataResultViewerThumbnail.goToPageTextField.msgDlg", totalPages), + NbBundle.getMessage(this.getClass(), "DataResultViewerThumbnail.goToPageTextField.err"), + JOptionPane.WARNING_MESSAGE); + return; + } + + currentPage = newPage; + switchPage(); + } + private void switchPage() { - SwingUtilities.invokeLater(() -> { + + EventQueue.invokeLater(() -> { setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); - try { - ProgressHandle progress = ProgressHandle.createHandle( + }); + + //Note the nodes factories are likely creating nodes in EDT anyway, but worker still helps + new SwingWorker() { + private ProgressHandle progress; + + @Override + protected 
Object doInBackground() throws Exception { + pagePrevButton.setEnabled(false); + pageNextButton.setEnabled(false); + goToPageField.setEnabled(false); + progress = ProgressHandle.createHandle( NbBundle.getMessage(this.getClass(), "DataResultViewerThumbnail.genThumbs")); progress.start(); progress.switchToIndeterminate(); - DataResultViewerThumbnail.this.rootNodeChildren.update(); + ExplorerManager explorerManager = DataResultViewerThumbnail.this.getExplorerManager(); + Node root = explorerManager.getRootContext(); + Node pageNode = root.getChildren().getNodeAt(currentPage - 1); + explorerManager.setExploredContext(pageNode); + currentPageImages = pageNode.getChildren().getNodesCount(); + return null; + } + + @Override + protected void done() { progress.finish(); - } catch (Exception ex) { - NotifyDescriptor d - = new NotifyDescriptor.Message( - NbBundle.getMessage(this.getClass(), "DataResultViewerThumbnail.switchPage.done.errMsg", - ex.getMessage()), - NotifyDescriptor.ERROR_MESSAGE); - DialogDisplayer.getDefault().notify(d); - logger.log(Level.SEVERE, "Error making thumbnails: {0}", ex.getMessage()); //NON-NLS - } finally { setCursor(null); updateControls(); + // see if any exceptions were thrown + try { + get(); + } catch (InterruptedException | ExecutionException ex) { + NotifyDescriptor d + = new NotifyDescriptor.Message( + NbBundle.getMessage(this.getClass(), "DataResultViewerThumbnail.switchPage.done.errMsg", + ex.getMessage()), + NotifyDescriptor.ERROR_MESSAGE); + DialogDisplayer.getDefault().notify(d); + logger.log(Level.SEVERE, "Error making thumbnails: {0}", ex.getMessage()); //NON-NLS + } + catch (java.util.concurrent.CancellationException ex) { + // catch and ignore if we were cancelled + } } - }); + }.execute(); + } @NbBundle.Messages({ "# {0} - sort criteria", "DataResultViewerThumbnail.sortLabel.textTemplate=Sorted by: {0}", "DataResultViewerThumbnail.sortLabel.text=Sorted by: ---"}) private void updateControls() { - if (rootNode != null && 
rootNode.getChildren().getNodesCount(true) > 0) { + if (totalPages == 0) { + pagePrevButton.setEnabled(false); + pageNextButton.setEnabled(false); + goToPageField.setEnabled(false); + pageNumLabel.setText(""); + imagesRangeLabel.setText(""); thumbnailSizeComboBox.setEnabled(false); sortButton.setEnabled(false); sortLabel.setText(DataResultViewerThumbnail_sortLabel_text()); } else { + pageNumLabel.setText(NbBundle.getMessage(this.getClass(), "DataResultViewerThumbnail.pageNumbers.curOfTotal", + Integer.toString(currentPage), Integer.toString(totalPages))); + final int imagesFrom = (currentPage - 1) * ThumbnailViewChildren.IMAGES_PER_PAGE + 1; + final int imagesTo = currentPageImages + (currentPage - 1) * ThumbnailViewChildren.IMAGES_PER_PAGE; + imagesRangeLabel.setText(imagesFrom + "-" + imagesTo); + + pageNextButton.setEnabled(!(currentPage == totalPages)); + pagePrevButton.setEnabled(!(currentPage == 1)); + goToPageField.setEnabled(totalPages > 1); sortButton.setEnabled(true); thumbnailSizeComboBox.setEnabled(true); if (rootNode != null) { @@ -388,6 +656,88 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { } } + /** + * Listens for root change updates and updates the paging controls + */ + private class PageUpdater implements NodeListener { + + private Node root; + + void setRoot(Node root) { + this.root = root; + } + + @Override + public void propertyChange(PropertyChangeEvent evt) { + } + + @Override + public void childrenAdded(NodeMemberEvent nme) { + totalPages = root.getChildren().getNodesCount(); + + if (totalPages == 0) { + currentPage = -1; + updateControls(); + return; + } + + if (currentPage == -1 || currentPage > totalPages) { + currentPage = 1; + } + + //force load the curPage node + final Node pageNode = root.getChildren().getNodeAt(currentPage - 1); + + //em.setSelectedNodes(new Node[]{pageNode}); + if (pageNode != null) { + pageNode.addNodeListener(new NodeListener() { + @Override + public void 
childrenAdded(NodeMemberEvent nme) { + currentPageImages = pageNode.getChildren().getNodesCount(); + updateControls(); + } + + @Override + public void childrenRemoved(NodeMemberEvent nme) { + currentPageImages = 0; + updateControls(); + } + + @Override + public void childrenReordered(NodeReorderEvent nre) { + } + + @Override + public void nodeDestroyed(NodeEvent ne) { + } + + @Override + public void propertyChange(PropertyChangeEvent evt) { + } + }); + + DataResultViewerThumbnail.this.getExplorerManager().setExploredContext(pageNode); + } + + updateControls(); + } + + @Override + public void childrenRemoved(NodeMemberEvent nme) { + totalPages = 0; + currentPage = -1; + updateControls(); + } + + @Override + public void childrenReordered(NodeReorderEvent nre) { + } + + @Override + public void nodeDestroyed(NodeEvent ne) { + } + } + private class NodeSelectionListener implements PropertyChangeListener { @Override @@ -417,5 +767,5 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { } } } - } + } } diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/ThumbnailViewChildren.java b/Core/src/org/sleuthkit/autopsy/corecomponents/ThumbnailViewChildren.java index 0e756133e1..5ded426270 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/ThumbnailViewChildren.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/ThumbnailViewChildren.java @@ -18,18 +18,16 @@ */ package org.sleuthkit.autopsy.corecomponents; +import com.google.common.collect.Lists; import com.google.common.util.concurrent.ThreadFactoryBuilder; import java.awt.Image; import java.awt.Toolkit; import java.lang.ref.SoftReference; import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; +import java.util.Collections; import java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; import java.util.List; -import java.util.Map; -import java.util.Set; import java.util.concurrent.Callable; import 
java.util.concurrent.CancellationException; import java.util.concurrent.ExecutionException; @@ -38,16 +36,19 @@ import java.util.concurrent.Executors; import java.util.concurrent.FutureTask; import java.util.logging.Level; import java.util.stream.Collectors; +import java.util.stream.IntStream; import java.util.stream.Stream; import javax.swing.SwingUtilities; import javax.swing.Timer; import org.apache.commons.lang3.StringUtils; import org.netbeans.api.progress.ProgressHandle; import org.netbeans.api.progress.ProgressHandleFactory; -import org.openide.nodes.ChildFactory; +import org.openide.nodes.AbstractNode; +import org.openide.nodes.Children; import org.openide.nodes.FilterNode; import org.openide.nodes.Node; import org.openide.util.NbBundle; +import org.openide.util.lookup.Lookups; import org.sleuthkit.autopsy.corecomponents.ResultViewerPersistence.SortCriterion; import static org.sleuthkit.autopsy.corecomponents.ResultViewerPersistence.loadSortCriteria; import org.sleuthkit.autopsy.coreutils.ImageUtils; @@ -64,72 +65,61 @@ import org.sleuthkit.datamodel.Content; * Filter-node like class, but adds additional hierarchy (pages) as parents of * the filtered nodes. 
*/ -class ThumbnailViewChildren extends ChildFactory.Detachable { +class ThumbnailViewChildren extends Children.Keys { private static final Logger logger = Logger.getLogger(ThumbnailViewChildren.class.getName()); @NbBundle.Messages("ThumbnailViewChildren.progress.cancelling=(Cancelling)") private static final String CANCELLING_POSTIX = Bundle.ThumbnailViewChildren_progress_cancelling(); + static final int IMAGES_PER_PAGE = 200; private final ExecutorService executor = Executors.newFixedThreadPool(3, new ThreadFactoryBuilder().setNameFormat("Thumbnail-Loader-%d").build()); private final List tasks = new ArrayList<>(); private final Node parent; + private final List> pages = new ArrayList<>(); private int thumbSize; - private final Map nodeCache = new HashMap<>(); - - private final Object isSupportedLock = new Object(); - /** * The constructor * - * @param parent The node which is the parent of this children. + * @param parent The node which is the parent of this children. * @param thumbSize The hight and/or width of the thumbnails in pixels. */ ThumbnailViewChildren(Node parent, int thumbSize) { + super(true); //support lazy loading + this.parent = parent; this.thumbSize = thumbSize; } @Override - protected synchronized boolean createKeys(List toPopulate) { - List suppContent = Stream.of(parent.getChildren().getNodes()) - .filter(n -> isSupported(n)) - .sorted(getComparator()) - .collect(Collectors.toList()); + protected void addNotify() { + super.addNotify(); - List currNodeNames = suppContent.stream() - .map(nd -> nd.getName()) - .collect(Collectors.toList()); + /* + * TODO: When lazy loading of original nodes is fixed, we should be + * asking the datamodel for the children instead and not counting the + * children nodes (which might not be preloaded at this point). 
+ */ + // get list of supported children sorted by persisted criteria + final List suppContent + = Stream.of(parent.getChildren().getNodes()) + .filter(ThumbnailViewChildren::isSupported) + .sorted(getComparator()) + .collect(Collectors.toList()); - // find set of keys that are no longer present with current createKeys call. - Set toRemove = new HashSet<>(nodeCache.keySet()); - currNodeNames.forEach((k) -> toRemove.remove(k)); + if (suppContent.isEmpty()) { + //if there are no images, there is nothing more to do + return; + } - // remove them from cache - toRemove.forEach((k) -> nodeCache.remove(k)); + //divide the supported content into buckets + pages.addAll(Lists.partition(suppContent, IMAGES_PER_PAGE)); - toPopulate.addAll(suppContent); - return true; - } - - @Override - protected Node createNodeForKey(Node key) { - ThumbnailViewNode retNode = new ThumbnailViewNode(key, this.thumbSize); - nodeCache.put(key.getName(), retNode); - return retNode; - } - - @Override - protected void removeNotify() { - super.removeNotify(); - nodeCache.clear(); - } - - void update() { - this.refresh(false); + //the keys are just the indices into the pages list. + setKeys(IntStream.range(0, pages.size()).boxed().collect(Collectors.toList())); } /** @@ -214,15 +204,21 @@ class ThumbnailViewChildren extends ChildFactory.Detachable { return null; } - private boolean isSupported(Node node) { + @Override + protected void removeNotify() { + super.removeNotify(); + pages.clear(); + } + @Override + protected Node[] createNodes(Integer pageNum) { + return new Node[]{new ThumbnailPageNode(pageNum, pages.get(pageNum))}; + + } + + private static boolean isSupported(Node node) { if (node != null) { - Content content = null; - // this is to prevent dead-locking issue with simultaneous accesses. 
- synchronized (isSupportedLock) { - content = node.getLookup().lookup(AbstractFile.class); - } - + Content content = node.getLookup().lookup(AbstractFile.class); if (content != null) { return ImageUtils.thumbnailSupported(content); } @@ -232,9 +228,10 @@ class ThumbnailViewChildren extends ChildFactory.Detachable { public void setThumbsSize(int thumbSize) { this.thumbSize = thumbSize; - for (ThumbnailViewNode node : nodeCache.values()) { - node.setThumbSize(thumbSize); - + for (Node page : getNodes()) { + for (Node node : page.getChildren().getNodes()) { + ((ThumbnailViewNode) node).setThumbSize(thumbSize); + } } } @@ -252,7 +249,6 @@ class ThumbnailViewChildren extends ChildFactory.Detachable { return task; } else { return null; - } } @@ -277,7 +273,7 @@ class ThumbnailViewChildren extends ChildFactory.Detachable { * The constructor * * @param wrappedNode The original node that this Node wraps. - * @param thumbSize The hight and/or width of the thumbnail in pixels. + * @param thumbSize The hight and/or width of the thumbnail in pixels. 
*/ private ThumbnailViewNode(Node wrappedNode, int thumbSize) { super(wrappedNode, FilterNode.Children.LEAF); @@ -384,4 +380,66 @@ class ThumbnailViewChildren extends ChildFactory.Detachable { } } } + + /** + * Node representing a page of thumbnails, a parent of image nodes, with a + * name showing children range + */ + private class ThumbnailPageNode extends AbstractNode { + + private ThumbnailPageNode(Integer pageNum, List childNodes) { + + super(new ThumbnailPageNodeChildren(childNodes), Lookups.singleton(pageNum)); + setName(Integer.toString(pageNum + 1)); + int from = 1 + (pageNum * IMAGES_PER_PAGE); + int to = from + ((ThumbnailPageNodeChildren) getChildren()).getChildCount() - 1; + setDisplayName(from + "-" + to); + + this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/Folder-icon.png"); //NON-NLS + } + } + + /** + * Children.Keys implementation which uses nodes as keys, and wraps them in + * ThumbnailViewNodes as the child nodes. + * + */ + private class ThumbnailPageNodeChildren extends Children.Keys { + + /* + * wrapped original nodes + */ + private List keyNodes = null; + + ThumbnailPageNodeChildren(List keyNodes) { + super(true); + this.keyNodes = keyNodes; + } + + @Override + protected void addNotify() { + super.addNotify(); + setKeys(keyNodes); + } + + @Override + protected void removeNotify() { + super.removeNotify(); + setKeys(Collections.emptyList()); + } + + int getChildCount() { + return keyNodes.size(); + } + + @Override + protected Node[] createNodes(Node wrapped) { + if (wrapped != null) { + final ThumbnailViewNode thumb = new ThumbnailViewNode(wrapped, thumbSize); + return new Node[]{thumb}; + } else { + return new Node[]{}; + } + } + } } From a4475f7044456e241a57bd404fc9ad89ff78ee96 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Wed, 24 Nov 2021 10:46:21 -0500 Subject: [PATCH 083/142] fixes --- .../autopsy/mainui/datamodel/AnalysisResultDAO.java | 4 ++-- .../sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java | 4 ++-- 
.../org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java | 6 +++--- .../sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java | 2 +- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java index fa5be343f9..43b595365e 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java @@ -819,7 +819,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { ConcurrentMap, AnalysisResultTableSearchResultsDTO> arConcurrentMap = this.analysisResultCache.asMap(); arConcurrentMap.forEach((k, v) -> { BlackboardArtifactSearchParam searchParam = k.getParamData(); - Set dsIds = analysisResultMap.get(searchParam.getArtifactType().getTypeID()); + Set dsIds = analysisResultMap.get(searchParam.getArtifactType()); if (dsIds != null && (searchParam.getDataSourceId() == null || dsIds.contains(searchParam.getDataSourceId()))) { arConcurrentMap.remove(k); } @@ -828,7 +828,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { ConcurrentMap, AnalysisResultTableSearchResultsDTO> setConcurrentMap = this.setHitCache.asMap(); setConcurrentMap.forEach((k, v) -> { AnalysisResultSetSearchParam searchParam = k.getParamData(); - Set dsIds = resultsWithSetMap.get(Pair.of(searchParam.getArtifactType().getTypeID(), searchParam.getSetName())); + Set dsIds = resultsWithSetMap.get(Pair.of(searchParam.getArtifactType(), searchParam.getSetName())); if (dsIds != null && (searchParam.getDataSourceId() == null || dsIds.contains(searchParam.getDataSourceId()))) { arConcurrentMap.remove(k); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index c4fe361484..ec86aa7f30 100644 --- 
a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -184,7 +184,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { List processEvent(PropertyChangeEvent evt) { // get a grouping of artifacts mapping the artifact type id to data source id. ModuleDataEvent dataEvt = DAOEventUtils.getModuleDataFromEvt(evt); - if (evt == null) { + if (dataEvt == null) { return Collections.emptyList(); } @@ -210,7 +210,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { // invalidate cache entries that are affected by events ConcurrentMap, DataArtifactTableSearchResultsDTO> concurrentMap = this.dataArtifactCache.asMap(); concurrentMap.forEach((k, v) -> { - Set dsIds = artifactTypeDataSourceMap.get(k.getParamData().getArtifactType().getTypeID()); + Set dsIds = artifactTypeDataSourceMap.get(k.getParamData().getArtifactType()); if (dsIds != null) { Long searchDsId = k.getParamData().getDataSourceId(); if (searchDsId == null || dsIds.contains(searchDsId)) { diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java index 0f7318f26e..d593ae9b89 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java @@ -159,7 +159,7 @@ public class ViewsDAO extends AbstractDAO { } FileTypeExtensionsEvent extEvt = (FileTypeExtensionsEvent) eventData; - String extension = "." 
+ extEvt.getExtension().toLowerCase(); + String extension = extEvt.getExtension().toLowerCase(); return key.getFilter().getFilter().contains(extension) && (key.getDataSourceId() == null || key.getDataSourceId() == extEvt.getDataSourceId()); } @@ -681,7 +681,7 @@ public class ViewsDAO extends AbstractDAO { // create an extension mapping if extension present if (!StringUtils.isBlank(af.getNameExtension())) { fileExtensionDsMap - .computeIfAbsent(af.getNameExtension(), (k) -> new HashSet<>()) + .computeIfAbsent("." + af.getNameExtension(), (k) -> new HashSet<>()) .add(af.getDataSourceObjectId()); } @@ -696,7 +696,7 @@ public class ViewsDAO extends AbstractDAO { // create a size mapping if size present FileSizeFilter sizeFilter = Stream.of(FileSizeFilter.values()) - .filter(filter -> af.getSize() >= filter.getMinBound() && af.getSize() < filter.getMaxBound()) + .filter(filter -> af.getSize() >= filter.getMinBound() && (filter.getMaxBound() == null || af.getSize() < filter.getMaxBound())) .findFirst() .orElse(null); diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java index 61d0761614..990c2ac2f3 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java @@ -47,7 +47,7 @@ public abstract class TreeChildFactory extends ChildFactory.Detachable { - if (evt.getNewValue() instanceof DAOEvent) { + if (evt.getNewValue() instanceof DAOAggregateEvent) { DAOAggregateEvent aggEvt = (DAOAggregateEvent) evt.getNewValue(); for (DAOEvent daoEvt : aggEvt.getEvents()) { if (daoEvt instanceof TreeEvent) { From 8b365c1091c5a1e2bec85d5bab067d13dd6bdbef Mon Sep 17 00:00:00 2001 From: Eugene Livis Date: Wed, 24 Nov 2021 11:28:41 -0500 Subject: [PATCH 084/142] Moved thumbnail generation to SwingUtilities.invokeLater --- .../DataResultViewerThumbnail.java | 54 ++++++------------- 1 file changed, 17 
insertions(+), 37 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerThumbnail.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerThumbnail.java index c2a533bc2b..1de09c893c 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerThumbnail.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerThumbnail.java @@ -21,12 +21,10 @@ package org.sleuthkit.autopsy.corecomponents; import java.awt.Color; import java.awt.Cursor; import java.awt.Dialog; -import java.awt.EventQueue; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.util.List; import java.util.Map; -import java.util.concurrent.ExecutionException; import java.util.function.Function; import java.util.logging.Level; import java.util.prefs.Preferences; @@ -34,7 +32,7 @@ import java.util.stream.Collectors; import javax.swing.JOptionPane; import javax.swing.ListSelectionModel; import javax.swing.SortOrder; -import javax.swing.SwingWorker; +import javax.swing.SwingUtilities; import org.apache.commons.lang3.StringUtils; import org.netbeans.api.progress.ProgressHandle; import org.openide.DialogDescriptor; @@ -566,56 +564,38 @@ public final class DataResultViewerThumbnail extends AbstractDataResultViewer { switchPage(); } - private void switchPage() { - - EventQueue.invokeLater(() -> { + private void switchPage() { + SwingUtilities.invokeLater(() -> { setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); - }); - - //Note the nodes factories are likely creating nodes in EDT anyway, but worker still helps - new SwingWorker() { - private ProgressHandle progress; - - @Override - protected Object doInBackground() throws Exception { + try { pagePrevButton.setEnabled(false); pageNextButton.setEnabled(false); goToPageField.setEnabled(false); - progress = ProgressHandle.createHandle( + ProgressHandle progress = ProgressHandle.createHandle( NbBundle.getMessage(this.getClass(), 
"DataResultViewerThumbnail.genThumbs")); progress.start(); progress.switchToIndeterminate(); + ExplorerManager explorerManager = DataResultViewerThumbnail.this.getExplorerManager(); Node root = explorerManager.getRootContext(); Node pageNode = root.getChildren().getNodeAt(currentPage - 1); explorerManager.setExploredContext(pageNode); currentPageImages = pageNode.getChildren().getNodesCount(); - return null; - } - - @Override - protected void done() { + progress.finish(); + } catch (Exception ex) { + NotifyDescriptor d + = new NotifyDescriptor.Message( + NbBundle.getMessage(this.getClass(), "DataResultViewerThumbnail.switchPage.done.errMsg", + ex.getMessage()), + NotifyDescriptor.ERROR_MESSAGE); + DialogDisplayer.getDefault().notify(d); + logger.log(Level.SEVERE, "Error making thumbnails: {0}", ex.getMessage()); //NON-NLS + } finally { setCursor(null); updateControls(); - // see if any exceptions were thrown - try { - get(); - } catch (InterruptedException | ExecutionException ex) { - NotifyDescriptor d - = new NotifyDescriptor.Message( - NbBundle.getMessage(this.getClass(), "DataResultViewerThumbnail.switchPage.done.errMsg", - ex.getMessage()), - NotifyDescriptor.ERROR_MESSAGE); - DialogDisplayer.getDefault().notify(d); - logger.log(Level.SEVERE, "Error making thumbnails: {0}", ex.getMessage()); //NON-NLS - } - catch (java.util.concurrent.CancellationException ex) { - // catch and ignore if we were cancelled - } } - }.execute(); - + }); } @NbBundle.Messages({ From 70ffd4ae11b7c573b27ed7d996200ef2fc3d8da7 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Wed, 24 Nov 2021 11:35:17 -0500 Subject: [PATCH 085/142] 8168 capture IngestJobContext reference in GPX module --- InternalPythonModules/GPX_Module/GPX_Parser_Module.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/InternalPythonModules/GPX_Module/GPX_Parser_Module.py b/InternalPythonModules/GPX_Module/GPX_Parser_Module.py index 375652b6c4..a1deaa447e 100644 --- 
a/InternalPythonModules/GPX_Module/GPX_Parser_Module.py +++ b/InternalPythonModules/GPX_Module/GPX_Parser_Module.py @@ -134,7 +134,7 @@ class GPXParserFileIngestModule(FileIngestModule): # Create a GeoArtifactsHelper for this file. geoArtifactHelper = GeoArtifactsHelper( - self.skCase, self.moduleName, None, file, context.getJobId()) + self.skCase, self.moduleName, None, file, self.context.getJobId()) if self.writeDebugMsgs: self.log(Level.INFO, "Processing " + file.getUniquePath() + @@ -213,7 +213,7 @@ class GPXParserFileIngestModule(FileIngestModule): art = file.newDataArtifact(BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_BOOKMARK), attributes) - self.blackboard.postArtifact(art, self.moduleName, context.getJobId()) + self.blackboard.postArtifact(art, self.moduleName, self.context.getJobId()) except Blackboard.BlackboardException as e: self.log(Level.SEVERE, "Error posting GPS bookmark artifact for " + From 39377e27c1979ef11db4d66e28da9e805eb33df5 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Wed, 24 Nov 2021 11:55:00 -0500 Subject: [PATCH 086/142] fixes --- .../autopsy/mainui/datamodel/MainDAO.java | 114 ++++++++++++------ .../datamodel/events/TreeEventTimedCache.java | 10 +- 2 files changed, 79 insertions(+), 45 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java index c53d9dd9ca..4ca660a9e1 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java @@ -33,6 +33,7 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.TimeUnit; +import java.util.logging.Level; import java.util.prefs.PreferenceChangeListener; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -89,13 +90,18 @@ public class MainDAO extends AbstractDAO { * The case event listener. 
*/ private final PropertyChangeListener caseEventListener = (evt) -> { - if (evt.getPropertyName().equals(Case.Events.CURRENT_CASE.toString())) { - this.clearCaches(); - } else if (QUEUED_CASE_EVENTS.contains(evt.getPropertyName())) { - handleEvent(evt, false); - } else { - // handle case events immediately - handleEvent(evt, true); + try { + if (evt.getPropertyName().equals(Case.Events.CURRENT_CASE.toString())) { + this.clearCaches(); + } else if (QUEUED_CASE_EVENTS.contains(evt.getPropertyName())) { + handleEvent(evt, false); + } else { + // handle case events immediately + handleEvent(evt, true); + } + } catch (Throwable ex) { + // firewall exception + logger.log(Level.WARNING, "An exception occurred while handling case events", ex); } }; @@ -103,21 +109,38 @@ public class MainDAO extends AbstractDAO { * The user preference listener. */ private final PreferenceChangeListener userPreferenceListener = (evt) -> { - this.clearCaches(); + try { + this.clearCaches(); + } catch (Throwable ex) { + // firewall exception + logger.log(Level.WARNING, "An exception occurred while handling user preference change", ex); + } + }; /** * The ingest module event listener. */ private final PropertyChangeListener ingestModuleEventListener = (evt) -> { - handleEvent(evt, false); + try { + handleEvent(evt, false); + } catch (Throwable ex) { + // firewall exception + logger.log(Level.WARNING, "An exception occurred while handling ingest module event", ex); + } }; /** * The ingest job event listener. 
*/ private final PropertyChangeListener ingestJobEventListener = (evt) -> { - handleEventFlush(); + try { + handleEventFlush(); + } catch (Throwable ex) { + // firewall exception + logger.log(Level.WARNING, "An exception occurred while handling ingest job event", ex); + } + }; private final ScheduledThreadPoolExecutor timeoutExecutor @@ -128,7 +151,15 @@ public class MainDAO extends AbstractDAO { private final PropertyChangeManager treeEventsManager = new PropertyChangeManager(); private final DAOEventBatcher eventBatcher = new DAOEventBatcher<>( - (evts) -> fireResultEvts(evts), RESULT_BATCH_MILLIS); + (evts) -> { + try { + fireResultEvts(evts); + } catch (Throwable ex) { + // firewall exception + logger.log(Level.WARNING, "An exception occurred while handling batched dao events", ex); + } + }, + RESULT_BATCH_MILLIS); private final DataArtifactDAO dataArtifactDAO = DataArtifactDAO.getInstance(); private final AnalysisResultDAO analysisResultDAO = AnalysisResultDAO.getInstance(); @@ -148,6 +179,40 @@ public class MainDAO extends AbstractDAO { osAccountsDAO, commAccountsDAO); + /** + * Registers listeners with autopsy event publishers and starts internal + * threads. + */ + void init() { + IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS, ingestModuleEventListener); + IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS, ingestJobEventListener); + Case.addPropertyChangeListener(caseEventListener); + UserPreferences.addChangeListener(userPreferenceListener); + + this.timeoutExecutor.scheduleAtFixedRate( + () -> { + try { + handleTreeEventTimeouts(); + } catch (Throwable ex) { + // firewall exception + logger.log(Level.WARNING, "An exception occurred while handling tree event timeouts", ex); + } + }, + WATCH_RESOLUTION_MILLIS, + WATCH_RESOLUTION_MILLIS, + TimeUnit.MILLISECONDS); + } + + /** + * Unregisters listeners from autopsy event publishers. 
+ */ + void unregister() { + IngestManager.getInstance().removeIngestModuleEventListener(INGEST_MODULE_EVENTS, ingestModuleEventListener); + IngestManager.getInstance().removeIngestJobEventListener(INGEST_JOB_EVENTS, ingestJobEventListener); + Case.removePropertyChangeListener(caseEventListener); + UserPreferences.removeChangeListener(userPreferenceListener); + } + public DataArtifactDAO getDataArtifactsDAO() { return dataArtifactDAO; } @@ -261,38 +326,11 @@ public class MainDAO extends AbstractDAO { fireTreeEvts(this.shouldRefreshTree()); } - /** - * Registers listeners with autopsy event publishers and starts internal - * threads. - */ - void init() { - IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS, ingestModuleEventListener); - IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS, ingestJobEventListener); - Case.addPropertyChangeListener(caseEventListener); - UserPreferences.addChangeListener(userPreferenceListener); - - this.timeoutExecutor.scheduleAtFixedRate( - () -> handleTreeEventTimeouts(), - WATCH_RESOLUTION_MILLIS, - WATCH_RESOLUTION_MILLIS, - TimeUnit.MILLISECONDS); - } - @Override protected void finalize() throws Throwable { unregister(); } - /** - * Unregisters listeners from autopsy event publishers. 
- */ - void unregister() { - IngestManager.getInstance().removeIngestModuleEventListener(INGEST_MODULE_EVENTS, ingestModuleEventListener); - IngestManager.getInstance().removeIngestJobEventListener(INGEST_JOB_EVENTS, ingestJobEventListener); - Case.removePropertyChangeListener(caseEventListener); - UserPreferences.removeChangeListener(userPreferenceListener); - } - /** * A wrapper around property change support that exposes * addPropertyChangeListener and removePropertyChangeListener so that diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEventTimedCache.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEventTimedCache.java index 94aa1ed64f..55577d01ab 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEventTimedCache.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEventTimedCache.java @@ -74,14 +74,10 @@ public class TreeEventTimedCache { public Collection getEventTimeouts() { long curTime = getCurTime(); - List toUpdate = new ArrayList<>(); + List toUpdate; synchronized (this.timeoutLock) { - this.eventTimeouts.forEach((k, v) -> { - if (v >= curTime) { - toUpdate.add(k); - this.eventTimeouts.remove(k); - } - }); + toUpdate = new ArrayList<>(this.eventTimeouts.keySet()); + this.eventTimeouts.keySet().removeAll(toUpdate); } return toUpdate; } From e9c3a5a027fb001d8477fa882635cdf5213644d9 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Wed, 24 Nov 2021 12:24:08 -0500 Subject: [PATCH 087/142] fixes --- .../mainui/datamodel/AnalysisResultDAO.java | 199 +++++++++--------- .../mainui/datamodel/DataArtifactDAO.java | 5 +- .../autopsy/mainui/datamodel/MainDAO.java | 9 +- .../datamodel/events/TreeEventTimedCache.java | 7 +- 4 files changed, 118 insertions(+), 102 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java index 43b595365e..2fbdbf2942 100644 --- 
a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java @@ -75,11 +75,11 @@ import org.sleuthkit.datamodel.VolumeSystem; * DAO for providing data about analysis results to populate the results viewer. */ public class AnalysisResultDAO extends BlackboardArtifactDAO { - + private static Logger logger = Logger.getLogger(AnalysisResultDAO.class.getName()); - + private static AnalysisResultDAO instance = null; - + @NbBundle.Messages({ "AnalysisResultDAO.columnKeys.score.name=Score", "AnalysisResultDAO.columnKeys.score.displayName=Score", @@ -102,31 +102,31 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { Bundle.AnalysisResultDAO_columnKeys_score_displayName(), Bundle.AnalysisResultDAO_columnKeys_score_description() ); - + static final ColumnKey CONCLUSION_COL = new ColumnKey( Bundle.AnalysisResultDAO_columnKeys_conclusion_name(), Bundle.AnalysisResultDAO_columnKeys_conclusion_displayName(), Bundle.AnalysisResultDAO_columnKeys_conclusion_description() ); - + static final ColumnKey CONFIGURATION_COL = new ColumnKey( Bundle.AnalysisResultDAO_columnKeys_configuration_name(), Bundle.AnalysisResultDAO_columnKeys_configuration_displayName(), Bundle.AnalysisResultDAO_columnKeys_configuration_description() ); - + static final ColumnKey JUSTIFICATION_COL = new ColumnKey( Bundle.AnalysisResultDAO_columnKeys_justification_name(), Bundle.AnalysisResultDAO_columnKeys_justification_displayName(), Bundle.AnalysisResultDAO_columnKeys_justification_description() ); - + static final ColumnKey SOURCE_TYPE_COL = new ColumnKey( Bundle.AnalysisResultDAO_columnKeys_sourceType_name(), Bundle.AnalysisResultDAO_columnKeys_sourceType_displayName(), Bundle.AnalysisResultDAO_columnKeys_sourceType_description() ); - + synchronized static AnalysisResultDAO getInstance() { if (instance == null) { instance = new AnalysisResultDAO(); @@ -140,7 +140,7 @@ public class AnalysisResultDAO extends 
BlackboardArtifactDAO { public static Set getIgnoredTreeTypes() { return BlackboardArtifactDAO.getIgnoredTreeTypes(); } - + @SuppressWarnings("deprecation") private static final Set STANDARD_SET_TYPES = ImmutableSet.of( BlackboardArtifact.Type.TSK_INTERESTING_ITEM.getTypeID(), @@ -153,15 +153,15 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { private final Cache, AnalysisResultTableSearchResultsDTO> analysisResultCache = CacheBuilder.newBuilder().maximumSize(1000).build(); private final Cache, AnalysisResultTableSearchResultsDTO> setHitCache = CacheBuilder.newBuilder().maximumSize(1000).build(); private final Cache, AnalysisResultTableSearchResultsDTO> keywordHitCache = CacheBuilder.newBuilder().maximumSize(1000).build(); - + private final TreeEventTimedCache treeCache = new TreeEventTimedCache<>(); - + private AnalysisResultTableSearchResultsDTO fetchAnalysisResultsForTable(SearchParams cacheKey) throws NoCurrentCaseException, TskCoreException { - + SleuthkitCase skCase = getCase(); Blackboard blackboard = skCase.getBlackboard(); BlackboardArtifact.Type artType = cacheKey.getParamData().getArtifactType(); - + List arts = new ArrayList<>(); String pagedWhereClause = getWhereClause(cacheKey); arts.addAll(blackboard.getAnalysisResultsWhere(pagedWhereClause)); @@ -169,16 +169,16 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { // Get total number of results long totalResultsCount = getTotalResultsCount(cacheKey, arts.size()); - + TableData tableData = createTableData(artType, arts); return new AnalysisResultTableSearchResultsDTO(artType, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), totalResultsCount); } - + private AnalysisResultTableSearchResultsDTO fetchSetNameHitsForTable(SearchParams cacheKey) throws NoCurrentCaseException, TskCoreException { - + SleuthkitCase skCase = getCase(); Blackboard blackboard = skCase.getBlackboard(); - + Long dataSourceId = cacheKey.getParamData().getDataSourceId(); 
BlackboardArtifact.Type artType = cacheKey.getParamData().getArtifactType(); @@ -187,9 +187,9 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { if (dataSourceId != null) { originalWhereClause += " AND artifacts.data_source_obj_id = " + dataSourceId + " "; } - + String expectedSetName = cacheKey.getParamData().getSetName(); - + List allHashHits = new ArrayList<>(); allHashHits.addAll(blackboard.getAnalysisResultsWhere(originalWhereClause)); blackboard.loadBlackboardAttributes(allHashHits); @@ -203,12 +203,12 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { arts.add(art); } } - + List pagedArtifacts = getPaged(arts, cacheKey); TableData tableData = createTableData(artType, pagedArtifacts); return new AnalysisResultTableSearchResultsDTO(artType, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), arts.size()); } - + @Override void addAnalysisResultColumnKeys(List columnKeys) { // Make sure these are in the same order as in addAnalysisResultFields() @@ -218,7 +218,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { columnKeys.add(CONFIGURATION_COL); columnKeys.add(JUSTIFICATION_COL); } - + @Override void addAnalysisResultFields(BlackboardArtifact artifact, List cells) throws TskCoreException { if (!(artifact instanceof AnalysisResult)) { @@ -265,7 +265,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } return ""; } - + @Override RowDTO createRow(BlackboardArtifact artifact, Content srcContent, Content linkedFile, boolean isTimelineSupported, List cellValues, long id) throws IllegalArgumentException { if (!(artifact instanceof AnalysisResult)) { @@ -273,36 +273,36 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } return new AnalysisResultRowDTO((AnalysisResult) artifact, srcContent, isTimelineSupported, cellValues, id); } - + public AnalysisResultTableSearchResultsDTO getAnalysisResultsForTable(AnalysisResultSearchParam artifactKey, long startItem, Long maxCount) throws 
ExecutionException, IllegalArgumentException { BlackboardArtifact.Type artType = artifactKey.getArtifactType(); - + if (artType == null || artType.getCategory() != BlackboardArtifact.Category.ANALYSIS_RESULT || (artifactKey.getDataSourceId() != null && artifactKey.getDataSourceId() < 0)) { throw new IllegalArgumentException(MessageFormat.format("Illegal data. " + "Artifact type must be non-null and analysis result. Data source id must be null or > 0. " + "Received artifact type: {0}; data source id: {1}", artType, artifactKey.getDataSourceId() == null ? "" : artifactKey.getDataSourceId())); } - + SearchParams searchParams = new SearchParams<>(artifactKey, startItem, maxCount); return analysisResultCache.get(searchParams, () -> fetchAnalysisResultsForTable(searchParams)); } - + private boolean isAnalysisResultsInvalidating(AnalysisResultSearchParam key, DAOEvent eventData) { if (!(eventData instanceof AnalysisResultEvent)) { return false; } - + AnalysisResultEvent analysisResultEvt = (AnalysisResultEvent) eventData; return key.getArtifactType().getTypeID() == analysisResultEvt.getArtifactType().getTypeID() && (key.getDataSourceId() == null || key.getDataSourceId() == analysisResultEvt.getDataSourceId()); } - + private boolean isAnalysisResultsSetInvalidating(AnalysisResultSetSearchParam key, DAOEvent event) { if (!(event instanceof AnalysisResultSetEvent)) { return false; } - + AnalysisResultSetEvent setEvent = (AnalysisResultSetEvent) event; return isAnalysisResultsInvalidating((AnalysisResultSearchParam) key, (AnalysisResultEvent) setEvent) && Objects.equals(key.getSetName(), setEvent.getSetName()); @@ -315,7 +315,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { + "Data source id must be null or > 0. " + "Received data source id: {0}", artifactKey.getDataSourceId() == null ? 
"" : artifactKey.getDataSourceId())); } - + SearchParams searchParams = new SearchParams<>(artifactKey, startItem, maxCount); return setHitCache.get(searchParams, () -> fetchSetNameHitsForTable(searchParams)); } @@ -328,19 +328,19 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { + "Data source id must be null or > 0. " + "Received data source id: {0}", artifactKey.getDataSourceId() == null ? "" : artifactKey.getDataSourceId())); } - + SearchParams searchParams = new SearchParams<>(artifactKey, startItem, maxCount); return keywordHitCache.get(searchParams, () -> fetchSetNameHitsForTable(searchParams)); } - + public void dropAnalysisResultCache() { analysisResultCache.invalidateAll(); } - + public void dropHashHitCache() { setHitCache.invalidateAll(); } - + public void dropKeywordHitCache() { keywordHitCache.invalidateAll(); } @@ -367,12 +367,12 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { // return results return new TreeResultsDTO<>(treeItemRows); - + } catch (NoCurrentCaseException | TskCoreException ex) { throw new ExecutionException("An error occurred while fetching analysis result counts.", ex); } } - + private TreeItemDTO getTreeItem(BlackboardArtifact.Type type, Long dataSourceId, TreeDisplayCount displayCount) { return new TreeItemDTO<>( BlackboardArtifact.Category.ANALYSIS_RESULT.name(), @@ -399,7 +399,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { if (dataSourceId != null && dataSourceId <= 0) { throw new IllegalArgumentException("Expected data source id to be > 0"); } - + try { // get artifact types and counts SleuthkitCase skCase = getCase(); @@ -414,7 +414,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { + ((dataSourceId == null) ? 
"" : " AND art.data_source_obj_id = " + dataSourceId + " \n") + ") res \n" + "GROUP BY res.set_name"; - + Map setCounts = new HashMap<>(); skCase.getCaseDbAccessManager().select(query, (resultSet) -> { try { @@ -427,7 +427,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { logger.log(Level.WARNING, "An error occurred while fetching set name counts.", ex); } }); - + return setCounts; } catch (NoCurrentCaseException | TskCoreException ex) { throw new ExecutionException("An error occurred while fetching set counts", ex); @@ -457,7 +457,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { BlackboardArtifact.Type type, Long dataSourceId, String nullSetName) throws IllegalArgumentException, ExecutionException { - + List> allSets = getSetCountsMap(type, BlackboardAttribute.Type.TSK_SET_NAME, dataSourceId).entrySet().stream() .filter(entry -> nullSetName != null || entry.getKey() != null) @@ -470,13 +470,13 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { TreeDisplayCount.getDeterminate(entry.getValue())); }) .collect(Collectors.toList()); - + return new TreeResultsDTO<>(allSets); } - + private TreeItemDTO getSetTreeItem(BlackboardArtifact.Type type, Long dataSourceId, String setName, String displayName, TreeDisplayCount displayCount) { - + return new TreeItemDTO<>( type.getTypeName(), new AnalysisResultSetSearchParam(type, dataSourceId, setName), @@ -527,15 +527,15 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { if (dataSourceId != null && dataSourceId <= 0) { throw new IllegalArgumentException("Expected data source id to be > 0"); } - + String dataSourceClause = dataSourceId == null ? "" : "AND art.data_source_obj_id = ?\n"; - + String setNameClause = setName == null ? 
"attr_res.set_name IS NULL" : "attr_res.set_name = ?"; - + String query = "res.search_term,\n" + " res.search_type,\n" + " SUM(res.count) AS count,\n" @@ -580,16 +580,16 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { // get artifact types and counts try (CaseDbPreparedStatement preparedStatement = getCase().getCaseDbAccessManager().prepareSelect(query)) { - + int paramIdx = 0; if (dataSourceId != null) { preparedStatement.setLong(++paramIdx, dataSourceId); } - + if (setName != null) { preparedStatement.setString(++paramIdx, setName); } - + List> items = new ArrayList<>(); getCase().getCaseDbAccessManager().select(preparedStatement, (resultSet) -> { try { @@ -598,7 +598,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { int searchType = resultSet.getInt("search_type"); long count = resultSet.getLong("count"); boolean hasChildren = resultSet.getBoolean("has_children"); - + String searchTermModified; switch (searchType) { case 0: @@ -615,7 +615,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { searchTermModified = searchTerm; break; } - + TreeItemDTO treeItem = new TreeItemDTO<>( "KEYWORD_SEARCH_TERMS", new KeywordSearchTermParams(setName, searchTerm, searchType, hasChildren, dataSourceId), @@ -623,16 +623,16 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { searchTermModified, TreeDisplayCount.getDeterminate(count) ); - + items.add(treeItem); } } catch (SQLException ex) { logger.log(Level.WARNING, "An error occurred while fetching results from result set.", ex); } }); - + return new TreeResultsDTO<>(items); - + } catch (SQLException | NoCurrentCaseException | TskCoreException ex) { throw new ExecutionException("An error occurred while fetching set counts", ex); } @@ -656,15 +656,15 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { if (dataSourceId != null && dataSourceId <= 0) { throw new IllegalArgumentException("Expected data source id to be > 0"); } - + String dataSourceClause = 
dataSourceId == null ? "" : "AND data_source_obj_id = ?\n"; - + String setNameClause = setName == null ? "res.set_name IS NULL" : "res.set_name = ?"; - + String query = "keyword, \n" + " COUNT(*) AS count \n" + "FROM (\n" @@ -686,28 +686,28 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { + "AND res.regexp_str = ?\n" + "AND res.search_type = ?\n" + "GROUP BY keyword"; - + try (CaseDbPreparedStatement preparedStatement = getCase().getCaseDbAccessManager().prepareSelect(query)) { // get artifact types and counts int paramIdx = 0; if (dataSourceId != null) { preparedStatement.setLong(++paramIdx, dataSourceId); } - + if (setName != null) { preparedStatement.setString(++paramIdx, setName); } - + preparedStatement.setString(++paramIdx, regexStr); preparedStatement.setInt(++paramIdx, searchType); - + List> items = new ArrayList<>(); getCase().getCaseDbAccessManager().select(preparedStatement, (resultSet) -> { try { while (resultSet.next()) { String keyword = resultSet.getString("keyword"); long count = resultSet.getLong("count"); - + items.add(new TreeItemDTO<>( "KEYWORD_MATCH", new KeywordMatchParams(setName, regexStr, keyword, searchType, dataSourceId), @@ -719,13 +719,13 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { logger.log(Level.WARNING, "An error occurred while fetching results from result set.", ex); } }); - + return new TreeResultsDTO<>(items); } catch (NoCurrentCaseException | TskCoreException | SQLException ex) { throw new ExecutionException("An error occurred while fetching keyword counts", ex); } } - + @Override void clearCaches() { this.analysisResultCache.invalidateAll(); @@ -733,14 +733,14 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { this.setHitCache.invalidateAll(); this.flushEvents(); } - + @Override Collection processEvent(PropertyChangeEvent evt) { // get a grouping of artifacts mapping the artifact type id to data source id. 
Map> analysisResultMap = new HashMap<>(); Map, Set> setMap = new HashMap<>(); Map> keywordHitsMap = new HashMap<>(); - + ModuleDataEvent dataEvt = DAOEventUtils.getModuleDataFromEvt(evt); if (dataEvt != null) { for (BlackboardArtifact art : dataEvt.getArtifacts()) { @@ -752,7 +752,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { String setName = setAttr == null ? null : setAttr.getValueString(); setMap.computeIfAbsent(Pair.of(art.getType(), setName), (k) -> new HashSet<>()) .add(art.getDataSourceObjectID()); - + } else if (BlackboardArtifact.Category.ANALYSIS_RESULT.equals(art.getType().getCategory())) { analysisResultMap.computeIfAbsent(art.getType(), (k) -> new HashSet<>()) .add(art.getDataSourceObjectID()); @@ -767,11 +767,14 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { if (analysisResultMap.isEmpty() && setMap.isEmpty() && keywordHitsMap.isEmpty()) { return Collections.emptyList(); } - + clearRelevantCacheEntries(analysisResultMap, setMap); - + List daoEvents = getDAOEvents(analysisResultMap, setMap); - Collection treeEvents = this.treeCache.enqueueAll(daoEvents); + Collection treeEvents = this.treeCache.enqueueAll(daoEvents).stream() + .map(arEvt -> getTreeEvent(arEvt, false)) + .collect(Collectors.toList()); + return Stream.of(daoEvents, treeEvents) .flatMap(lst -> lst.stream()) .collect(Collectors.toList()); @@ -794,7 +797,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { // invalidate cache entries that are affected by events Stream analysisResultEvts = analysisResultMap.entrySet().stream() .flatMap(entry -> entry.getValue().stream().map(dsId -> new AnalysisResultEvent(entry.getKey(), dsId))); - + Stream analysisResultSetEvts = resultsWithSetMap.entrySet().stream() .flatMap(entry -> entry.getValue().stream().map(dsId -> new AnalysisResultSetEvent(entry.getKey().getRight(), entry.getKey().getLeft(), dsId))); @@ -824,7 +827,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { 
arConcurrentMap.remove(k); } }); - + ConcurrentMap, AnalysisResultTableSearchResultsDTO> setConcurrentMap = this.setHitCache.asMap(); setConcurrentMap.forEach((k, v) -> { AnalysisResultSetSearchParam searchParam = k.getParamData(); @@ -837,26 +840,30 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { // GVDTODO handle clearing cache for keyword search hits // private final Cache, AnalysisResultTableSearchResultsDTO> keywordHitCache = CacheBuilder.newBuilder().maximumSize(1000).build(); } - + + private TreeEvent getTreeEvent(AnalysisResultEvent arEvt, boolean shouldRefresh) { + // GVDTODO handle keyword items when integrated + if (arEvt instanceof AnalysisResultSetEvent) { + AnalysisResultSetEvent setEvt = (AnalysisResultSetEvent) arEvt; + return new TreeEvent(getSetTreeItem(setEvt.getArtifactType(), setEvt.getDataSourceId(), + setEvt.getSetName(), setEvt.getSetName() == null ? "" : setEvt.getSetName(), + TreeDisplayCount.INDETERMINATE), shouldRefresh); + } else { + return new TreeEvent(getTreeItem(arEvt.getArtifactType(), arEvt.getDataSourceId(), TreeDisplayCount.INDETERMINATE), shouldRefresh); + } + } + @Override Collection flushEvents() { - return this.treeCache.flushEvents(); + return this.treeCache.flushEvents().stream() + .map(arEvt -> getTreeEvent(arEvt, true)) + .collect(Collectors.toList()); } - + @Override Collection shouldRefreshTree() { return this.treeCache.getEventTimeouts().stream() - .map(daoEvt -> { - // GVDTODO handle keyword items when integrated - if (daoEvt instanceof AnalysisResultSetEvent) { - AnalysisResultSetEvent setEvt = (AnalysisResultSetEvent) daoEvt; - return new TreeEvent(getSetTreeItem(setEvt.getArtifactType(), setEvt.getDataSourceId(), - setEvt.getSetName(), setEvt.getSetName() == null ? 
"" : setEvt.getSetName(), - TreeDisplayCount.INDETERMINATE), false); - } else { - return new TreeEvent(getTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.INDETERMINATE), false); - } - }) + .map(arEvt -> getTreeEvent(arEvt, true)) .collect(Collectors.toList()); } @@ -873,16 +880,16 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { public AnalysisResultFetcher(AnalysisResultSearchParam params) { super(params); } - + protected AnalysisResultDAO getDAO() { return MainDAO.getInstance().getAnalysisResultDAO(); } - + @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { return getDAO().getAnalysisResultsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize); } - + @Override public boolean isRefreshRequired(DAOEvent evt) { return getDAO().isAnalysisResultsInvalidating(this.getParameters(), evt); @@ -902,16 +909,16 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { public AnalysisResultSetFetcher(AnalysisResultSetSearchParam params) { super(params); } - + protected AnalysisResultDAO getDAO() { return MainDAO.getInstance().getAnalysisResultDAO(); } - + @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { return getDAO().getAnalysisResultSetHits(this.getParameters(), pageIdx * pageSize, (long) pageSize); } - + @Override public boolean isRefreshRequired(DAOEvent evt) { return getDAO().isAnalysisResultsSetInvalidating(this.getParameters(), evt); @@ -931,16 +938,16 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { public KeywordHitResultFetcher(KeywordHitSearchParam params) { super(params); } - + protected AnalysisResultDAO getDAO() { return MainDAO.getInstance().getAnalysisResultDAO(); } - + @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { return getDAO().getKeywordHitsForTable(this.getParameters(), pageIdx * pageSize, (long) 
pageSize); } - + @Override public boolean isRefreshRequired(DAOEvent evt) { // GVDTODO diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index ec86aa7f30..b97311f4fc 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -249,7 +249,9 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { @Override Collection flushEvents() { - return this.treeCache.flushEvents(); + return this.treeCache.flushEvents().stream() + .map(daoEvt -> new TreeEvent(getTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.INDETERMINATE), true)) + .collect(Collectors.toList()); } @Override @@ -259,6 +261,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { .collect(Collectors.toList()); } + /* * Handles fetching and paging of data artifacts. */ diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java index 4ca660a9e1..1afa1b868a 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java @@ -272,12 +272,13 @@ public class MainDAO extends AbstractDAO { @Override Collection flushEvents() { - Stream> daoStreamEvts = allDAOs.stream() - .map((subDAO) -> subDAO.flushEvents()); + List> daoStreamEvts = allDAOs.stream() + .map((subDAO) -> subDAO.flushEvents()) + .collect(Collectors.toList()); - Collection batchFlushedEvts = eventBatcher.flushEvents(); + daoStreamEvts.add(eventBatcher.flushEvents()); - return Stream.concat(daoStreamEvts, Stream.of(batchFlushedEvts)) + return daoStreamEvts.stream() .flatMap(evts -> evts == null ? 
Stream.empty() : evts.stream()) .collect(Collectors.toList()); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEventTimedCache.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEventTimedCache.java index 55577d01ab..3a60166006 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEventTimedCache.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEventTimedCache.java @@ -25,6 +25,7 @@ import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; public class TreeEventTimedCache { private static final long DEFAULT_TIMEOUT_MILLIS = 2 * 60 * 1000; @@ -76,7 +77,11 @@ public class TreeEventTimedCache { long curTime = getCurTime(); List toUpdate; synchronized (this.timeoutLock) { - toUpdate = new ArrayList<>(this.eventTimeouts.keySet()); + toUpdate = this.eventTimeouts.entrySet().stream() + .filter(e -> e.getValue() < curTime) + .map(e -> e.getKey()) + .collect(Collectors.toList()); + this.eventTimeouts.keySet().removeAll(toUpdate); } return toUpdate; From 2169cda0fd3eb1f07308603197a0df31d5aaa52e Mon Sep 17 00:00:00 2001 From: apriestman Date: Wed, 24 Nov 2021 13:01:39 -0500 Subject: [PATCH 088/142] Renamed method and expanded comments. --- .../mainui/datamodel/FileSystemColumnUtils.java | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemColumnUtils.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemColumnUtils.java index 476645984c..0f95583eaa 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemColumnUtils.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemColumnUtils.java @@ -151,9 +151,10 @@ class FileSystemColumnUtils { * * @param content The Content object. 
* - * @return The type corresponding to the content; UNSUPPORTED if the content will not be displayed + * @return The type corresponding to the content; UNSUPPORTED if the + * content will not be displayed in the file system section of the tree. */ - private static ContentType getContentType(Content content) { + private static ContentType getDisplayableContentType(Content content) { if (content instanceof Image) { return ContentType.IMAGE; } else if (content instanceof Volume) { @@ -167,9 +168,11 @@ class FileSystemColumnUtils { } /** - * Check whether a given content object should be displayed. + * Check whether a given content object should be displayed in the + * file system section of the tree. * We can display an object if ContentType is not UNSUPPORTED - * and if it is not the root directory. + * and if it is not the root directory. We can not display + * file systems, volume systems, artifacts, etc. * * @param content The content. * @@ -185,7 +188,7 @@ class FileSystemColumnUtils { } return ! ((AbstractFile)content).isRoot(); } - return (getContentType(content) != ContentType.UNSUPPORTED); + return (getDisplayableContentType(content) != ContentType.UNSUPPORTED); } /** @@ -200,7 +203,7 @@ class FileSystemColumnUtils { static List getDisplayableTypesForContentList(List contentList) { List displayableTypes = new ArrayList<>(); for (Content content : contentList) { - ContentType type = getContentType(content); + ContentType type = getDisplayableContentType(content); if (type != ContentType.UNSUPPORTED && ! 
displayableTypes.contains(type)) { displayableTypes.add(type); } From 8798cd68b41dca661a9f1ae4dee522a185426b25 Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Wed, 24 Nov 2021 14:27:57 -0500 Subject: [PATCH 089/142] Update AnalysisResultSetEvent.java --- .../mainui/datamodel/events/AnalysisResultSetEvent.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AnalysisResultSetEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AnalysisResultSetEvent.java index 67cf87cd82..c0bdf9e90b 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AnalysisResultSetEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AnalysisResultSetEvent.java @@ -21,8 +21,8 @@ package org.sleuthkit.autopsy.mainui.datamodel.events; import org.sleuthkit.datamodel.BlackboardArtifact; /** - * An event for an artifact added or changed of a particular type possibly for a - * particular data source. + * An event for an Analysis Result that is organized by Set names to + * signal that one has been added or removed on a given data source. 
*/ public class AnalysisResultSetEvent extends AnalysisResultEvent { private final String setName; From cef8b0ed38eaee48111b1b98ca9b1bd190bbe3f3 Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Wed, 24 Nov 2021 14:29:37 -0500 Subject: [PATCH 091/142] Update BlackboardArtifactEvent.java --- .../mainui/datamodel/events/BlackboardArtifactEvent.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/BlackboardArtifactEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/BlackboardArtifactEvent.java index b5bbcec03e..d2e3eac2b4 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/BlackboardArtifactEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/BlackboardArtifactEvent.java @@ -22,7 +22,8 @@ import java.util.Objects; import org.sleuthkit.datamodel.BlackboardArtifact; /** - * An event for an artifact added in a particular type. + * A base class for DataArtifact and AnalysisResult events to signal that one + * has been added or removed. 
*/ public class BlackboardArtifactEvent implements DAOEvent { private final BlackboardArtifact.Type artifactType; From b113288a1367bffdd77a6a0f85c19eb85d7d3f18 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Wed, 24 Nov 2021 14:44:25 -0500 Subject: [PATCH 092/142] updates --- .../corecomponents/DataResultPanel.java | 16 +- .../autopsy/mainui/datamodel/AbstractDAO.java | 5 +- .../mainui/datamodel/AnalysisResultDAO.java | 203 +++++++++--------- .../mainui/datamodel/CommAccountsDAO.java | 2 +- .../mainui/datamodel/DataArtifactDAO.java | 18 +- .../mainui/datamodel/FileSystemDAO.java | 4 +- .../autopsy/mainui/datamodel/MainDAO.java | 18 +- .../mainui/datamodel/OsAccountsDAO.java | 2 +- .../autopsy/mainui/datamodel/TagsDAO.java | 2 +- .../autopsy/mainui/datamodel/ViewsDAO.java | 2 +- .../events/BlackboardArtifactEvent.java | 2 +- .../datamodel/events/DAOEventUtils.java | 24 ++- ...eeEventTimedCache.java => TreeCounts.java} | 66 +++++- 13 files changed, 220 insertions(+), 144 deletions(-) rename Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/{TreeEventTimedCache.java => TreeCounts.java} (59%) diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java index 67321393c9..649f304356 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java @@ -150,8 +150,11 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C private final PropertyChangeListener caseEventListener = evt -> { String evtName = evt.getPropertyName(); - if (Case.Events.CURRENT_CASE.toString().equals(evtName) && evt.getNewValue() == null) { - nodeNameToPageCountListenerMap.clear(); + if (Case.Events.CURRENT_CASE.toString().equals(evtName)) { + searchResultManager = null; + if (evt.getNewValue() == null) { + nodeNameToPageCountListenerMap.clear(); + } } }; @@ -163,7 +166,7 @@ public class 
DataResultPanel extends javax.swing.JPanel implements DataResult, C IngestManager.IngestModuleEvent.DATA_ADDED); private final MainDAO mainDAO = MainDAO.getInstance(); - + private final PropertyChangeListener DAOListener = evt -> { SearchManager manager = this.searchResultManager; if (manager != null && evt != null && evt.getNewValue() instanceof DAOAggregateEvent) { @@ -489,7 +492,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C // if search result root node, it's fine; otherwise, wrap in result // viewer filter node to make sure there are no grandchildren - this.currentRootNode = (rootNode instanceof SearchResultRootNode) + this.currentRootNode = (rootNode instanceof SearchResultRootNode) ? rootNode : new ResultViewerFilterParentNode(rootNode); @@ -1298,7 +1301,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C ex); } } - + /** * Displays results of querying the DAO for the given search parameters * query. @@ -1338,7 +1341,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C ex); } } - + /** * Displays results of querying the DAO for the given search parameters * query. @@ -1490,6 +1493,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C /** * Main constructor. + * * @param original The original node to wrap. */ ResultViewerFilterParentNode(Node original) { diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java index 801f0db3b6..432182778e 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java @@ -47,11 +47,12 @@ abstract class AbstractDAO { abstract Collection processEvent(PropertyChangeEvent evt); /** - * Any events that are delayed or batched are flushed and returned. + * Handles the ingest complete or cancelled event. 
Any events that are + * delayed or batched are flushed and returned. * * @return The flushed events that were delayed and batched. */ - abstract Collection flushEvents(); + abstract Collection handleIngestComplete(); /** * Returns any categories that require a tree refresh. For instance, if web diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java index 2fbdbf2942..5de95de49f 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java @@ -52,7 +52,7 @@ import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeDisplayCount; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventUtils; import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; -import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEventTimedCache; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeCounts; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.AnalysisResult; @@ -75,11 +75,11 @@ import org.sleuthkit.datamodel.VolumeSystem; * DAO for providing data about analysis results to populate the results viewer. 
*/ public class AnalysisResultDAO extends BlackboardArtifactDAO { - + private static Logger logger = Logger.getLogger(AnalysisResultDAO.class.getName()); - + private static AnalysisResultDAO instance = null; - + @NbBundle.Messages({ "AnalysisResultDAO.columnKeys.score.name=Score", "AnalysisResultDAO.columnKeys.score.displayName=Score", @@ -102,31 +102,31 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { Bundle.AnalysisResultDAO_columnKeys_score_displayName(), Bundle.AnalysisResultDAO_columnKeys_score_description() ); - + static final ColumnKey CONCLUSION_COL = new ColumnKey( Bundle.AnalysisResultDAO_columnKeys_conclusion_name(), Bundle.AnalysisResultDAO_columnKeys_conclusion_displayName(), Bundle.AnalysisResultDAO_columnKeys_conclusion_description() ); - + static final ColumnKey CONFIGURATION_COL = new ColumnKey( Bundle.AnalysisResultDAO_columnKeys_configuration_name(), Bundle.AnalysisResultDAO_columnKeys_configuration_displayName(), Bundle.AnalysisResultDAO_columnKeys_configuration_description() ); - + static final ColumnKey JUSTIFICATION_COL = new ColumnKey( Bundle.AnalysisResultDAO_columnKeys_justification_name(), Bundle.AnalysisResultDAO_columnKeys_justification_displayName(), Bundle.AnalysisResultDAO_columnKeys_justification_description() ); - + static final ColumnKey SOURCE_TYPE_COL = new ColumnKey( Bundle.AnalysisResultDAO_columnKeys_sourceType_name(), Bundle.AnalysisResultDAO_columnKeys_sourceType_displayName(), Bundle.AnalysisResultDAO_columnKeys_sourceType_description() ); - + synchronized static AnalysisResultDAO getInstance() { if (instance == null) { instance = new AnalysisResultDAO(); @@ -140,7 +140,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { public static Set getIgnoredTreeTypes() { return BlackboardArtifactDAO.getIgnoredTreeTypes(); } - + @SuppressWarnings("deprecation") private static final Set STANDARD_SET_TYPES = ImmutableSet.of( BlackboardArtifact.Type.TSK_INTERESTING_ITEM.getTypeID(), @@ -153,15 +153,15 
@@ public class AnalysisResultDAO extends BlackboardArtifactDAO { private final Cache, AnalysisResultTableSearchResultsDTO> analysisResultCache = CacheBuilder.newBuilder().maximumSize(1000).build(); private final Cache, AnalysisResultTableSearchResultsDTO> setHitCache = CacheBuilder.newBuilder().maximumSize(1000).build(); private final Cache, AnalysisResultTableSearchResultsDTO> keywordHitCache = CacheBuilder.newBuilder().maximumSize(1000).build(); - - private final TreeEventTimedCache treeCache = new TreeEventTimedCache<>(); - + + private final TreeCounts treeCounts = new TreeCounts<>(); + private AnalysisResultTableSearchResultsDTO fetchAnalysisResultsForTable(SearchParams cacheKey) throws NoCurrentCaseException, TskCoreException { - + SleuthkitCase skCase = getCase(); Blackboard blackboard = skCase.getBlackboard(); BlackboardArtifact.Type artType = cacheKey.getParamData().getArtifactType(); - + List arts = new ArrayList<>(); String pagedWhereClause = getWhereClause(cacheKey); arts.addAll(blackboard.getAnalysisResultsWhere(pagedWhereClause)); @@ -169,16 +169,16 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { // Get total number of results long totalResultsCount = getTotalResultsCount(cacheKey, arts.size()); - + TableData tableData = createTableData(artType, arts); return new AnalysisResultTableSearchResultsDTO(artType, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), totalResultsCount); } - + private AnalysisResultTableSearchResultsDTO fetchSetNameHitsForTable(SearchParams cacheKey) throws NoCurrentCaseException, TskCoreException { - + SleuthkitCase skCase = getCase(); Blackboard blackboard = skCase.getBlackboard(); - + Long dataSourceId = cacheKey.getParamData().getDataSourceId(); BlackboardArtifact.Type artType = cacheKey.getParamData().getArtifactType(); @@ -187,9 +187,9 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { if (dataSourceId != null) { originalWhereClause += " AND artifacts.data_source_obj_id = " + 
dataSourceId + " "; } - + String expectedSetName = cacheKey.getParamData().getSetName(); - + List allHashHits = new ArrayList<>(); allHashHits.addAll(blackboard.getAnalysisResultsWhere(originalWhereClause)); blackboard.loadBlackboardAttributes(allHashHits); @@ -203,12 +203,12 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { arts.add(art); } } - + List pagedArtifacts = getPaged(arts, cacheKey); TableData tableData = createTableData(artType, pagedArtifacts); return new AnalysisResultTableSearchResultsDTO(artType, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), arts.size()); } - + @Override void addAnalysisResultColumnKeys(List columnKeys) { // Make sure these are in the same order as in addAnalysisResultFields() @@ -218,7 +218,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { columnKeys.add(CONFIGURATION_COL); columnKeys.add(JUSTIFICATION_COL); } - + @Override void addAnalysisResultFields(BlackboardArtifact artifact, List cells) throws TskCoreException { if (!(artifact instanceof AnalysisResult)) { @@ -265,7 +265,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } return ""; } - + @Override RowDTO createRow(BlackboardArtifact artifact, Content srcContent, Content linkedFile, boolean isTimelineSupported, List cellValues, long id) throws IllegalArgumentException { if (!(artifact instanceof AnalysisResult)) { @@ -273,36 +273,36 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } return new AnalysisResultRowDTO((AnalysisResult) artifact, srcContent, isTimelineSupported, cellValues, id); } - + public AnalysisResultTableSearchResultsDTO getAnalysisResultsForTable(AnalysisResultSearchParam artifactKey, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException { BlackboardArtifact.Type artType = artifactKey.getArtifactType(); - + if (artType == null || artType.getCategory() != BlackboardArtifact.Category.ANALYSIS_RESULT || (artifactKey.getDataSourceId() != null && 
artifactKey.getDataSourceId() < 0)) { throw new IllegalArgumentException(MessageFormat.format("Illegal data. " + "Artifact type must be non-null and analysis result. Data source id must be null or > 0. " + "Received artifact type: {0}; data source id: {1}", artType, artifactKey.getDataSourceId() == null ? "" : artifactKey.getDataSourceId())); } - + SearchParams searchParams = new SearchParams<>(artifactKey, startItem, maxCount); return analysisResultCache.get(searchParams, () -> fetchAnalysisResultsForTable(searchParams)); } - + private boolean isAnalysisResultsInvalidating(AnalysisResultSearchParam key, DAOEvent eventData) { if (!(eventData instanceof AnalysisResultEvent)) { return false; } - + AnalysisResultEvent analysisResultEvt = (AnalysisResultEvent) eventData; return key.getArtifactType().getTypeID() == analysisResultEvt.getArtifactType().getTypeID() && (key.getDataSourceId() == null || key.getDataSourceId() == analysisResultEvt.getDataSourceId()); } - + private boolean isAnalysisResultsSetInvalidating(AnalysisResultSetSearchParam key, DAOEvent event) { if (!(event instanceof AnalysisResultSetEvent)) { return false; } - + AnalysisResultSetEvent setEvent = (AnalysisResultSetEvent) event; return isAnalysisResultsInvalidating((AnalysisResultSearchParam) key, (AnalysisResultEvent) setEvent) && Objects.equals(key.getSetName(), setEvent.getSetName()); @@ -315,7 +315,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { + "Data source id must be null or > 0. " + "Received data source id: {0}", artifactKey.getDataSourceId() == null ? "" : artifactKey.getDataSourceId())); } - + SearchParams searchParams = new SearchParams<>(artifactKey, startItem, maxCount); return setHitCache.get(searchParams, () -> fetchSetNameHitsForTable(searchParams)); } @@ -328,19 +328,19 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { + "Data source id must be null or > 0. " + "Received data source id: {0}", artifactKey.getDataSourceId() == null ? 
"" : artifactKey.getDataSourceId())); } - + SearchParams searchParams = new SearchParams<>(artifactKey, startItem, maxCount); return keywordHitCache.get(searchParams, () -> fetchSetNameHitsForTable(searchParams)); } - + public void dropAnalysisResultCache() { analysisResultCache.invalidateAll(); } - + public void dropHashHitCache() { setHitCache.invalidateAll(); } - + public void dropKeywordHitCache() { keywordHitCache.invalidateAll(); } @@ -367,12 +367,12 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { // return results return new TreeResultsDTO<>(treeItemRows); - + } catch (NoCurrentCaseException | TskCoreException ex) { throw new ExecutionException("An error occurred while fetching analysis result counts.", ex); } } - + private TreeItemDTO getTreeItem(BlackboardArtifact.Type type, Long dataSourceId, TreeDisplayCount displayCount) { return new TreeItemDTO<>( BlackboardArtifact.Category.ANALYSIS_RESULT.name(), @@ -399,7 +399,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { if (dataSourceId != null && dataSourceId <= 0) { throw new IllegalArgumentException("Expected data source id to be > 0"); } - + try { // get artifact types and counts SleuthkitCase skCase = getCase(); @@ -414,7 +414,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { + ((dataSourceId == null) ? 
"" : " AND art.data_source_obj_id = " + dataSourceId + " \n") + ") res \n" + "GROUP BY res.set_name"; - + Map setCounts = new HashMap<>(); skCase.getCaseDbAccessManager().select(query, (resultSet) -> { try { @@ -427,7 +427,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { logger.log(Level.WARNING, "An error occurred while fetching set name counts.", ex); } }); - + return setCounts; } catch (NoCurrentCaseException | TskCoreException ex) { throw new ExecutionException("An error occurred while fetching set counts", ex); @@ -457,7 +457,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { BlackboardArtifact.Type type, Long dataSourceId, String nullSetName) throws IllegalArgumentException, ExecutionException { - + List> allSets = getSetCountsMap(type, BlackboardAttribute.Type.TSK_SET_NAME, dataSourceId).entrySet().stream() .filter(entry -> nullSetName != null || entry.getKey() != null) @@ -470,13 +470,13 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { TreeDisplayCount.getDeterminate(entry.getValue())); }) .collect(Collectors.toList()); - + return new TreeResultsDTO<>(allSets); } - + private TreeItemDTO getSetTreeItem(BlackboardArtifact.Type type, Long dataSourceId, String setName, String displayName, TreeDisplayCount displayCount) { - + return new TreeItemDTO<>( type.getTypeName(), new AnalysisResultSetSearchParam(type, dataSourceId, setName), @@ -527,15 +527,15 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { if (dataSourceId != null && dataSourceId <= 0) { throw new IllegalArgumentException("Expected data source id to be > 0"); } - + String dataSourceClause = dataSourceId == null ? "" : "AND art.data_source_obj_id = ?\n"; - + String setNameClause = setName == null ? 
"attr_res.set_name IS NULL" : "attr_res.set_name = ?"; - + String query = "res.search_term,\n" + " res.search_type,\n" + " SUM(res.count) AS count,\n" @@ -580,16 +580,16 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { // get artifact types and counts try (CaseDbPreparedStatement preparedStatement = getCase().getCaseDbAccessManager().prepareSelect(query)) { - + int paramIdx = 0; if (dataSourceId != null) { preparedStatement.setLong(++paramIdx, dataSourceId); } - + if (setName != null) { preparedStatement.setString(++paramIdx, setName); } - + List> items = new ArrayList<>(); getCase().getCaseDbAccessManager().select(preparedStatement, (resultSet) -> { try { @@ -598,7 +598,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { int searchType = resultSet.getInt("search_type"); long count = resultSet.getLong("count"); boolean hasChildren = resultSet.getBoolean("has_children"); - + String searchTermModified; switch (searchType) { case 0: @@ -615,7 +615,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { searchTermModified = searchTerm; break; } - + TreeItemDTO treeItem = new TreeItemDTO<>( "KEYWORD_SEARCH_TERMS", new KeywordSearchTermParams(setName, searchTerm, searchType, hasChildren, dataSourceId), @@ -623,16 +623,16 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { searchTermModified, TreeDisplayCount.getDeterminate(count) ); - + items.add(treeItem); } } catch (SQLException ex) { logger.log(Level.WARNING, "An error occurred while fetching results from result set.", ex); } }); - + return new TreeResultsDTO<>(items); - + } catch (SQLException | NoCurrentCaseException | TskCoreException ex) { throw new ExecutionException("An error occurred while fetching set counts", ex); } @@ -656,15 +656,15 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { if (dataSourceId != null && dataSourceId <= 0) { throw new IllegalArgumentException("Expected data source id to be > 0"); } - + String dataSourceClause = 
dataSourceId == null ? "" : "AND data_source_obj_id = ?\n"; - + String setNameClause = setName == null ? "res.set_name IS NULL" : "res.set_name = ?"; - + String query = "keyword, \n" + " COUNT(*) AS count \n" + "FROM (\n" @@ -686,28 +686,28 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { + "AND res.regexp_str = ?\n" + "AND res.search_type = ?\n" + "GROUP BY keyword"; - + try (CaseDbPreparedStatement preparedStatement = getCase().getCaseDbAccessManager().prepareSelect(query)) { // get artifact types and counts int paramIdx = 0; if (dataSourceId != null) { preparedStatement.setLong(++paramIdx, dataSourceId); } - + if (setName != null) { preparedStatement.setString(++paramIdx, setName); } - + preparedStatement.setString(++paramIdx, regexStr); preparedStatement.setInt(++paramIdx, searchType); - + List> items = new ArrayList<>(); getCase().getCaseDbAccessManager().select(preparedStatement, (resultSet) -> { try { while (resultSet.next()) { String keyword = resultSet.getString("keyword"); long count = resultSet.getLong("count"); - + items.add(new TreeItemDTO<>( "KEYWORD_MATCH", new KeywordMatchParams(setName, regexStr, keyword, searchType, dataSourceId), @@ -719,29 +719,29 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { logger.log(Level.WARNING, "An error occurred while fetching results from result set.", ex); } }); - + return new TreeResultsDTO<>(items); } catch (NoCurrentCaseException | TskCoreException | SQLException ex) { throw new ExecutionException("An error occurred while fetching keyword counts", ex); } } - + @Override void clearCaches() { this.analysisResultCache.invalidateAll(); this.keywordHitCache.invalidateAll(); this.setHitCache.invalidateAll(); - this.flushEvents(); + this.handleIngestComplete(); } - + @Override Collection processEvent(PropertyChangeEvent evt) { // get a grouping of artifacts mapping the artifact type id to data source id. 
Map> analysisResultMap = new HashMap<>(); Map, Set> setMap = new HashMap<>(); Map> keywordHitsMap = new HashMap<>(); - - ModuleDataEvent dataEvt = DAOEventUtils.getModuleDataFromEvt(evt); + + ModuleDataEvent dataEvt = DAOEventUtils.getModuelDataFromArtifactEvent(evt); if (dataEvt != null) { for (BlackboardArtifact art : dataEvt.getArtifacts()) { try { @@ -752,7 +752,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { String setName = setAttr == null ? null : setAttr.getValueString(); setMap.computeIfAbsent(Pair.of(art.getType(), setName), (k) -> new HashSet<>()) .add(art.getDataSourceObjectID()); - + } else if (BlackboardArtifact.Category.ANALYSIS_RESULT.equals(art.getType().getCategory())) { analysisResultMap.computeIfAbsent(art.getType(), (k) -> new HashSet<>()) .add(art.getDataSourceObjectID()); @@ -767,21 +767,21 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { if (analysisResultMap.isEmpty() && setMap.isEmpty() && keywordHitsMap.isEmpty()) { return Collections.emptyList(); } - + clearRelevantCacheEntries(analysisResultMap, setMap); - - List daoEvents = getDAOEvents(analysisResultMap, setMap); - Collection treeEvents = this.treeCache.enqueueAll(daoEvents).stream() + + List daoEvents = getResultViewEvents(analysisResultMap, setMap); + Collection treeEvents = this.treeCounts.enqueueAll(daoEvents).stream() .map(arEvt -> getTreeEvent(arEvt, false)) .collect(Collectors.toList()); - + return Stream.of(daoEvents, treeEvents) .flatMap(lst -> lst.stream()) .collect(Collectors.toList()); } /** - * Generate DAO events from digest of Autopsy events. + * Generate result view events from digest of Autopsy events. * * @param analysisResultMap Contains the analysis results that do not use a * set name. A mapping of analysis result type ids @@ -793,11 +793,10 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { * * @return The list of dao events. 
*/ - private List getDAOEvents(Map> analysisResultMap, Map, Set> resultsWithSetMap) { - // invalidate cache entries that are affected by events + private List getResultViewEvents(Map> analysisResultMap, Map, Set> resultsWithSetMap) { Stream analysisResultEvts = analysisResultMap.entrySet().stream() .flatMap(entry -> entry.getValue().stream().map(dsId -> new AnalysisResultEvent(entry.getKey(), dsId))); - + Stream analysisResultSetEvts = resultsWithSetMap.entrySet().stream() .flatMap(entry -> entry.getValue().stream().map(dsId -> new AnalysisResultSetEvent(entry.getKey().getRight(), entry.getKey().getLeft(), dsId))); @@ -827,7 +826,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { arConcurrentMap.remove(k); } }); - + ConcurrentMap, AnalysisResultTableSearchResultsDTO> setConcurrentMap = this.setHitCache.asMap(); setConcurrentMap.forEach((k, v) -> { AnalysisResultSetSearchParam searchParam = k.getParamData(); @@ -840,7 +839,17 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { // GVDTODO handle clearing cache for keyword search hits // private final Cache, AnalysisResultTableSearchResultsDTO> keywordHitCache = CacheBuilder.newBuilder().maximumSize(1000).build(); } - + + /** + * Creates a TreeEvent instance based on the analysis result event and + * whether or not this event should trigger a full refresh of counts. + * + * @param arEvt The analysis result event. + * @param shouldRefresh Whether or not this tree event should trigger a full + * refresh of counts. + * + * @return The tree event. 
+ */ private TreeEvent getTreeEvent(AnalysisResultEvent arEvt, boolean shouldRefresh) { // GVDTODO handle keyword items when integrated if (arEvt instanceof AnalysisResultSetEvent) { @@ -852,17 +861,17 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { return new TreeEvent(getTreeItem(arEvt.getArtifactType(), arEvt.getDataSourceId(), TreeDisplayCount.INDETERMINATE), shouldRefresh); } } - + @Override - Collection flushEvents() { - return this.treeCache.flushEvents().stream() + Collection handleIngestComplete() { + return this.treeCounts.flushEvents().stream() .map(arEvt -> getTreeEvent(arEvt, true)) .collect(Collectors.toList()); } - + @Override Collection shouldRefreshTree() { - return this.treeCache.getEventTimeouts().stream() + return this.treeCounts.getEventTimeouts().stream() .map(arEvt -> getTreeEvent(arEvt, true)) .collect(Collectors.toList()); } @@ -880,16 +889,16 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { public AnalysisResultFetcher(AnalysisResultSearchParam params) { super(params); } - + protected AnalysisResultDAO getDAO() { return MainDAO.getInstance().getAnalysisResultDAO(); } - + @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { return getDAO().getAnalysisResultsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize); } - + @Override public boolean isRefreshRequired(DAOEvent evt) { return getDAO().isAnalysisResultsInvalidating(this.getParameters(), evt); @@ -909,16 +918,16 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { public AnalysisResultSetFetcher(AnalysisResultSetSearchParam params) { super(params); } - + protected AnalysisResultDAO getDAO() { return MainDAO.getInstance().getAnalysisResultDAO(); } - + @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { return getDAO().getAnalysisResultSetHits(this.getParameters(), pageIdx * pageSize, (long) pageSize); } - + @Override 
public boolean isRefreshRequired(DAOEvent evt) { return getDAO().isAnalysisResultsSetInvalidating(this.getParameters(), evt); @@ -938,16 +947,16 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { public KeywordHitResultFetcher(KeywordHitSearchParam params) { super(params); } - + protected AnalysisResultDAO getDAO() { return MainDAO.getInstance().getAnalysisResultDAO(); } - + @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { return getDAO().getKeywordHitsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize); } - + @Override public boolean isRefreshRequired(DAOEvent evt) { // GVDTODO diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java index 64cc40d823..8f1f50f092 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java @@ -156,7 +156,7 @@ public class CommAccountsDAO extends AbstractDAO { } @Override - Collection flushEvents() { + Collection handleIngestComplete() { // GVDTODO return Collections.emptyList(); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index b97311f4fc..b595b132d7 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -45,7 +45,7 @@ import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeDisplayCount; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; -import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEventTimedCache; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeCounts; import 
org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; @@ -78,7 +78,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { return BlackboardArtifactDAO.getIgnoredTreeTypes(); } - private final TreeEventTimedCache treeCache = new TreeEventTimedCache<>(); + private final TreeCounts treeCounts = new TreeCounts<>(); private final Cache, DataArtifactTableSearchResultsDTO> dataArtifactCache = CacheBuilder.newBuilder().maximumSize(1000).build(); private DataArtifactTableSearchResultsDTO fetchDataArtifactsForTable(SearchParams cacheKey) throws NoCurrentCaseException, TskCoreException { @@ -150,7 +150,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { public TreeResultsDTO getDataArtifactCounts(Long dataSourceId) throws ExecutionException { try { // get row dto's sorted by display name - Set indeterminateTypes = this.treeCache.getEnqueued().stream() + Set indeterminateTypes = this.treeCounts.getEnqueued().stream() .filter(evt -> dataSourceId == null || evt.getDataSourceId() == dataSourceId) .map(evt -> evt.getArtifactType()) .collect(Collectors.toSet()); @@ -177,13 +177,13 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { @Override void clearCaches() { this.dataArtifactCache.invalidateAll(); - this.flushEvents(); + this.handleIngestComplete(); } @Override List processEvent(PropertyChangeEvent evt) { // get a grouping of artifacts mapping the artifact type id to data source id. 
- ModuleDataEvent dataEvt = DAOEventUtils.getModuleDataFromEvt(evt); + ModuleDataEvent dataEvt = DAOEventUtils.getModuelDataFromArtifactEvent(evt); if (dataEvt == null) { return Collections.emptyList(); } @@ -229,7 +229,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { } } - List newTreeEvents = this.treeCache.enqueueAll(dataArtifactEvents).stream() + List newTreeEvents = this.treeCounts.enqueueAll(dataArtifactEvents).stream() .map(daoEvt -> new TreeEvent(getTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.INDETERMINATE), false)) .collect(Collectors.toList()); @@ -248,15 +248,15 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { } @Override - Collection flushEvents() { - return this.treeCache.flushEvents().stream() + Collection handleIngestComplete() { + return this.treeCounts.flushEvents().stream() .map(daoEvt -> new TreeEvent(getTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.INDETERMINATE), true)) .collect(Collectors.toList()); } @Override Collection shouldRefreshTree() { - return this.treeCache.getEventTimeouts().stream() + return this.treeCounts.getEventTimeouts().stream() .map(daoEvt -> new TreeEvent(getTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.INDETERMINATE), true)) .collect(Collectors.toList()); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java index 5255b063a9..e643a30b66 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java @@ -357,7 +357,7 @@ public class FileSystemDAO extends AbstractDAO { } @Override - Collection flushEvents() { + Collection handleIngestComplete() { // GVDTODO return Collections.emptyList(); } @@ -376,7 +376,7 @@ public class FileSystemDAO extends AbstractDAO { Set affectedParentContent = new HashSet<>(); boolean 
refreshAllContent = false; - Content content = DAOEventUtils.getDerivedContentFromEvt(evt); + Content content = DAOEventUtils.getDerivedFileContentFromEvt(evt); if (content != null) { Content parentContent; try { diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java index 1afa1b868a..2d0447ac83 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java @@ -271,9 +271,9 @@ public class MainDAO extends AbstractDAO { } @Override - Collection flushEvents() { + Collection handleIngestComplete() { List> daoStreamEvts = allDAOs.stream() - .map((subDAO) -> subDAO.flushEvents()) + .map((subDAO) -> subDAO.handleIngestComplete()) .collect(Collectors.toList()); daoStreamEvts.add(eventBatcher.flushEvents()); @@ -283,7 +283,15 @@ public class MainDAO extends AbstractDAO { .collect(Collectors.toList()); } - private void handleEvent(PropertyChangeEvent evt, boolean immediateAction) { + /** + * Processes and handles an autopsy event. + * + * @param evt The event. + * @param immediateResultAction If true, result events are immediately + * fired. Otherwise, the result events are + * batched. 
+ */ + private void handleEvent(PropertyChangeEvent evt, boolean immediateResultAction) { Collection daoEvts = processEvent(evt); Map> daoEvtsByType = daoEvts.stream() @@ -292,7 +300,7 @@ public class MainDAO extends AbstractDAO { fireTreeEvts(daoEvtsByType.get(DAOEvent.Type.TREE)); List resultEvts = daoEvtsByType.get(DAOEvent.Type.RESULT); - if (immediateAction) { + if (immediateResultAction) { fireResultEvts(resultEvts); } else { eventBatcher.enqueueAllEvents(resultEvts); @@ -300,7 +308,7 @@ public class MainDAO extends AbstractDAO { } private void handleEventFlush() { - Collection daoEvts = flushEvents(); + Collection daoEvts = handleIngestComplete(); Map> daoEvtsByType = daoEvts.stream() .collect(Collectors.groupingBy(e -> e.getType())); diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java index 838440f173..10237412a1 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java @@ -186,7 +186,7 @@ public class OsAccountsDAO extends AbstractDAO { } @Override - Collection flushEvents() { + Collection handleIngestComplete() { // GVDTODO return Collections.emptyList(); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java index 3c8f9629ef..800c2e09ed 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java @@ -314,7 +314,7 @@ public class TagsDAO extends AbstractDAO { } @Override - Collection flushEvents() { + Collection handleIngestComplete() { // GVDTODO return Collections.emptyList(); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java index d593ae9b89..09d5d87cf1 100644 --- 
a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java @@ -655,7 +655,7 @@ public class ViewsDAO extends AbstractDAO { } @Override - Collection flushEvents() { + Collection handleIngestComplete() { // GVDTODO return Collections.emptyList(); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/BlackboardArtifactEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/BlackboardArtifactEvent.java index b5bbcec03e..834f4872fd 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/BlackboardArtifactEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/BlackboardArtifactEvent.java @@ -24,7 +24,7 @@ import org.sleuthkit.datamodel.BlackboardArtifact; /** * An event for an artifact added in a particular type. */ -public class BlackboardArtifactEvent implements DAOEvent { +public abstract class BlackboardArtifactEvent implements DAOEvent { private final BlackboardArtifact.Type artifactType; private final long dataSourceId; diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventUtils.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventUtils.java index 3c1d81cc85..c70b0ff34c 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventUtils.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventUtils.java @@ -32,17 +32,16 @@ import org.sleuthkit.datamodel.Content; public class DAOEventUtils { /** - * Returns the content from the event. If the event does not - * contain a event or the event does not contain Content, null - * is returned. + * Returns the file content from the event. If the event is not a file event + * or the event does not contain file content, null is returned. * * @param evt The event * * @return The inner content or null if no content. 
*/ - public static Content getContentFromEvt(PropertyChangeEvent evt) { + public static Content getContentFromFileEvent(PropertyChangeEvent evt) { String eventName = evt.getPropertyName(); - Content derivedContent = getDerivedContentFromEvt(evt); + Content derivedContent = getDerivedFileContentFromEvt(evt); if (derivedContent != null) { return derivedContent; } else if (IngestManager.IngestModuleEvent.FILE_DONE.toString().equals(eventName) @@ -52,15 +51,16 @@ public class DAOEventUtils { return null; } } - + /** * Returns the content from the ModuleContentEvent. If the event does not - * contain a event or the event does not contain Content, null - * is returned. + * contain a event or the event does not contain Content, null is returned. + * * @param evt The event + * * @return The inner content or null if no content. */ - public static Content getDerivedContentFromEvt(PropertyChangeEvent evt) { + public static Content getDerivedFileContentFromEvt(PropertyChangeEvent evt) { String eventName = evt.getPropertyName(); if (IngestManager.IngestModuleEvent.CONTENT_CHANGED.toString().equals(eventName) && (evt.getOldValue() instanceof ModuleContentEvent) @@ -75,11 +75,13 @@ public class DAOEventUtils { /** * Returns a file in the event if a file is found in the event. + * * @param evt The autopsy event. + * * @return The inner file or null if no file found. */ public static AbstractFile getFileFromEvt(PropertyChangeEvent evt) { - Content content = getContentFromEvt(evt); + Content content = getContentFromFileEvent(evt); return (content instanceof AbstractFile) ? ((AbstractFile) content) : null; @@ -93,7 +95,7 @@ public class DAOEventUtils { * * @return The inner ModuleDataEvent or null. 
*/ - public static ModuleDataEvent getModuleDataFromEvt(PropertyChangeEvent evt) { + public static ModuleDataEvent getModuelDataFromArtifactEvent(PropertyChangeEvent evt) { String eventName = evt.getPropertyName(); if (IngestManager.IngestModuleEvent.DATA_ADDED.toString().equals(eventName) && (evt.getOldValue() instanceof ModuleDataEvent)) { diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEventTimedCache.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeCounts.java similarity index 59% rename from Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEventTimedCache.java rename to Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeCounts.java index 3a60166006..edb62d0f01 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEventTimedCache.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeCounts.java @@ -27,31 +27,66 @@ import java.util.Map; import java.util.Set; import java.util.stream.Collectors; -public class TreeEventTimedCache { +/** + * This class is in charge of tracking tree events. When an autopsy event comes + * in that affects a tree node, the sub DAO's enqueue the event in this class + * along with the timeout (current time + timeoutMillis). If another autopsy + * event comes in affecting the same tree category, the timeout is reset. Events + * are not removed from tracking until getEventTimeouts is flushEvents are + * called. The MainDAO has a periodically running task to see if any tree events + * have timed out, and broadcasts those events that have reached timeout. + */ +public class TreeCounts { + private static final long DEFAULT_TIMEOUT_MILLIS = 2 * 60 * 1000; - + private final Object timeoutLock = new Object(); private final Map eventTimeouts = new HashMap<>(); private final long timeoutMillis; - - public TreeEventTimedCache() { + /** + * Constructor that uses default timeout duration. 
+ */ + public TreeCounts() { this(DEFAULT_TIMEOUT_MILLIS); } - - public TreeEventTimedCache(long timeoutMillis) { + + /** + * Main constructor. + * + * @param timeoutMillis How long to track an event before it reaches a + * timeout (in milliseconds). + */ + public TreeCounts(long timeoutMillis) { this.timeoutMillis = timeoutMillis; } + /** + * Returns the current time in milliseconds. + * + * @return The current time in milliseconds. + */ private long getCurTime() { return System.currentTimeMillis(); } + /** + * Returns the timeout time based on the current time. + * + * @return The current time. + */ private long getTimeoutTime() { return getCurTime() + timeoutMillis; } + /** + * Adds events to be tracked until they reach timeout. + * + * @param events The events to be tracked. + * + * @return The subset of events that were not already being tracked. + */ public Collection enqueueAll(Collection events) { Collection updateToIndeterminate = new ArrayList<>(); @@ -68,11 +103,22 @@ public class TreeEventTimedCache { return updateToIndeterminate; } - + + /** + * Returns the set of events that are currently being tracked for timeout. + * + * @return The events that are being tracked for timeout. + */ public Set getEnqueued() { return new HashSet<>(eventTimeouts.keySet()); } + /** + * Returns the events that have reached timeout based on the current time + * stamp and removes them from tracking. + * + * @return The + */ public Collection getEventTimeouts() { long curTime = getCurTime(); List toUpdate; @@ -87,6 +133,12 @@ public class TreeEventTimedCache { return toUpdate; } + /** + * Returns all currently tracked events despite timeout. This method removes + * all events from tracking. + * + * @return All currently tracked events. 
+ */ public Collection flushEvents() { synchronized (this.timeoutLock) { List toRet = new ArrayList<>(eventTimeouts.keySet()); From 924e13d6ed019c9bd3ea1fd676d8c126feedd8cd Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Wed, 24 Nov 2021 15:06:45 -0500 Subject: [PATCH 093/142] Update TreeEvent.java --- .../autopsy/mainui/datamodel/events/TreeEvent.java | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEvent.java index 9207a7f152..4fadbbc672 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/TreeEvent.java @@ -21,11 +21,19 @@ package org.sleuthkit.autopsy.mainui.datamodel.events; import java.util.Objects; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; +/** + * An event to signal that an item in the tree has been + * added or changed. 
+ */ public class TreeEvent implements DAOEvent { - private final TreeItemDTO itemRecord; - private final boolean refreshRequired; + private final TreeItemDTO itemRecord; // the updated item + private final boolean refreshRequired; // true if tree should request new data from DAO + /** + * @param itemRecord The updated item + * @param rereshRequired True if the tree should go to the DAO for updated data + */ public TreeEvent(TreeItemDTO itemRecord, boolean refreshRequired) { this.itemRecord = itemRecord; this.refreshRequired = refreshRequired; From 1893bc6512e2dc30747c362bdcc20aaa2ba6bf10 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Wed, 24 Nov 2021 15:20:53 -0500 Subject: [PATCH 094/142] change to set --- .../autopsy/mainui/datamodel/AbstractDAO.java | 8 ++--- .../mainui/datamodel/AnalysisResultDAO.java | 14 ++++----- .../mainui/datamodel/CommAccountsDAO.java | 12 ++++---- .../mainui/datamodel/DataArtifactDAO.java | 16 +++++----- .../mainui/datamodel/FileSystemDAO.java | 16 +++++----- .../autopsy/mainui/datamodel/MainDAO.java | 30 +++++++++---------- .../mainui/datamodel/OsAccountsDAO.java | 14 ++++----- .../autopsy/mainui/datamodel/TagsDAO.java | 14 ++++----- .../autopsy/mainui/datamodel/ViewsDAO.java | 20 ++++++------- .../datamodel/events/DAOAggregateEvent.java | 10 +++---- .../datamodel/events/DAOEventBatcher.java | 6 ++-- 11 files changed, 80 insertions(+), 80 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java index 432182778e..a03b0a1528 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java @@ -20,7 +20,7 @@ package org.sleuthkit.autopsy.mainui.datamodel; import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; import java.beans.PropertyChangeEvent; -import java.util.Collection; +import java.util.Set; import 
org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; /** @@ -44,7 +44,7 @@ abstract class AbstractDAO { * @return The list of DAOEvents that should be broadcasted to the views or * an empty list if the Autopsy events are irrelevant to this DAO. */ - abstract Collection processEvent(PropertyChangeEvent evt); + abstract Set processEvent(PropertyChangeEvent evt); /** * Handles the ingest complete or cancelled event. Any events that are @@ -52,7 +52,7 @@ abstract class AbstractDAO { * * @return The flushed events that were delayed and batched. */ - abstract Collection handleIngestComplete(); + abstract Set handleIngestComplete(); /** * Returns any categories that require a tree refresh. For instance, if web @@ -62,5 +62,5 @@ abstract class AbstractDAO { * * @return The categories that require a tree refresh. */ - abstract Collection shouldRefreshTree(); + abstract Set shouldRefreshTree(); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java index 5de95de49f..3c034d66fa 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java @@ -735,7 +735,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } @Override - Collection processEvent(PropertyChangeEvent evt) { + Set processEvent(PropertyChangeEvent evt) { // get a grouping of artifacts mapping the artifact type id to data source id. 
Map> analysisResultMap = new HashMap<>(); Map, Set> setMap = new HashMap<>(); @@ -765,7 +765,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { // don't continue if no relevant items found if (analysisResultMap.isEmpty() && setMap.isEmpty() && keywordHitsMap.isEmpty()) { - return Collections.emptyList(); + return Collections.emptySet(); } clearRelevantCacheEntries(analysisResultMap, setMap); @@ -777,7 +777,7 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { return Stream.of(daoEvents, treeEvents) .flatMap(lst -> lst.stream()) - .collect(Collectors.toList()); + .collect(Collectors.toSet()); } /** @@ -863,17 +863,17 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { } @Override - Collection handleIngestComplete() { + Set handleIngestComplete() { return this.treeCounts.flushEvents().stream() .map(arEvt -> getTreeEvent(arEvt, true)) - .collect(Collectors.toList()); + .collect(Collectors.toSet()); } @Override - Collection shouldRefreshTree() { + Set shouldRefreshTree() { return this.treeCounts.getEventTimeouts().stream() .map(arEvt -> getTreeEvent(arEvt, true)) - .collect(Collectors.toList()); + .collect(Collectors.toSet()); } /** diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java index 8f1f50f092..a0a7833441 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java @@ -156,19 +156,19 @@ public class CommAccountsDAO extends AbstractDAO { } @Override - Collection handleIngestComplete() { + Set handleIngestComplete() { // GVDTODO - return Collections.emptyList(); + return Collections.emptySet(); } @Override - Collection shouldRefreshTree() { + Set shouldRefreshTree() { // GVDTODO - return Collections.emptyList(); + return Collections.emptySet(); } @Override - List processEvent(PropertyChangeEvent evt) { + Set 
processEvent(PropertyChangeEvent evt) { // maps account type to the data sources affected // GVDTODO this can probably be rewritten now that it isn't handling a list of autopsy events Map> commAccountsAffected = new HashMap<>(); @@ -210,7 +210,7 @@ public class CommAccountsDAO extends AbstractDAO { return commAccountsAffected.entrySet().stream() .flatMap(entry -> entry.getValue().stream().map(dsId -> new CommAccountsEvent(entry.getKey(), dsId))) - .collect(Collectors.toList()); + .collect(Collectors.toSet()); } /** diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index b595b132d7..9b79a0fe29 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -181,11 +181,11 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { } @Override - List processEvent(PropertyChangeEvent evt) { + Set processEvent(PropertyChangeEvent evt) { // get a grouping of artifacts mapping the artifact type id to data source id. 
ModuleDataEvent dataEvt = DAOEventUtils.getModuelDataFromArtifactEvent(evt); if (dataEvt == null) { - return Collections.emptyList(); + return Collections.emptySet(); } Map> artifactTypeDataSourceMap = dataEvt.getArtifacts().stream() @@ -204,7 +204,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { // don't do anything else if no relevant events if (artifactTypeDataSourceMap.isEmpty()) { - return Collections.emptyList(); + return Collections.emptySet(); } // invalidate cache entries that are affected by events @@ -235,7 +235,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { return Stream.of(dataArtifactEvents, newTreeEvents) .flatMap((lst) -> lst.stream()) - .collect(Collectors.toList()); + .collect(Collectors.toSet()); } private TreeItemDTO getTreeItem(BlackboardArtifact.Type artifactType, Long dataSourceId, TreeDisplayCount displayCount) { @@ -248,17 +248,17 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { } @Override - Collection handleIngestComplete() { + Set handleIngestComplete() { return this.treeCounts.flushEvents().stream() .map(daoEvt -> new TreeEvent(getTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.INDETERMINATE), true)) - .collect(Collectors.toList()); + .collect(Collectors.toSet()); } @Override - Collection shouldRefreshTree() { + Set shouldRefreshTree() { return this.treeCounts.getEventTimeouts().stream() .map(daoEvt -> new TreeEvent(getTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.INDETERMINATE), true)) - .collect(Collectors.toList()); + .collect(Collectors.toSet()); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java index e643a30b66..bb08d7b6af 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java @@ -357,19 +357,19 @@ public class FileSystemDAO 
extends AbstractDAO { } @Override - Collection handleIngestComplete() { + Set handleIngestComplete() { // GVDTODO - return Collections.emptyList(); + return Collections.emptySet(); } @Override - Collection shouldRefreshTree() { + Set shouldRefreshTree() { // GVDTODO - return Collections.emptyList(); + return Collections.emptySet(); } @Override - List processEvent(PropertyChangeEvent evt) { + Set processEvent(PropertyChangeEvent evt) { // GVDTODO these can probably be rewritten now that it isn't handling a collection of autopsy events Set affectedPersons = new HashSet<>(); Set affectedHosts = new HashSet<>(); @@ -383,11 +383,11 @@ public class FileSystemDAO extends AbstractDAO { parentContent = content.getParent(); } catch (TskCoreException ex) { logger.log(Level.WARNING, "Unable to get parent content of content with id: " + content.getId(), ex); - return Collections.emptyList(); + return Collections.emptySet(); } if (parentContent == null) { - return Collections.emptyList(); + return Collections.emptySet(); } if (invalidatesAllFileSystem(parentContent)) { @@ -453,7 +453,7 @@ public class FileSystemDAO extends AbstractDAO { fileEvts ) .flatMap(s -> s) - .collect(Collectors.toList()); + .collect(Collectors.toSet()); } /** diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java index 2d0447ac83..44de97b935 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java @@ -255,23 +255,23 @@ public class MainDAO extends AbstractDAO { } @Override - List processEvent(PropertyChangeEvent evt) { + Set processEvent(PropertyChangeEvent evt) { return allDAOs.stream() .map(subDAO -> subDAO.processEvent(evt)) .flatMap(evts -> evts == null ? 
Stream.empty() : evts.stream()) - .collect(Collectors.toList()); + .collect(Collectors.toSet()); } @Override - List shouldRefreshTree() { + Set shouldRefreshTree() { return allDAOs.stream() .map((subDAO) -> subDAO.shouldRefreshTree()) .flatMap(evts -> evts == null ? Stream.empty() : evts.stream()) - .collect(Collectors.toList()); + .collect(Collectors.toSet()); } @Override - Collection handleIngestComplete() { + Set handleIngestComplete() { List> daoStreamEvts = allDAOs.stream() .map((subDAO) -> subDAO.handleIngestComplete()) .collect(Collectors.toList()); @@ -280,7 +280,7 @@ public class MainDAO extends AbstractDAO { return daoStreamEvts.stream() .flatMap(evts -> evts == null ? Stream.empty() : evts.stream()) - .collect(Collectors.toList()); + .collect(Collectors.toSet()); } /** @@ -294,12 +294,12 @@ public class MainDAO extends AbstractDAO { private void handleEvent(PropertyChangeEvent evt, boolean immediateResultAction) { Collection daoEvts = processEvent(evt); - Map> daoEvtsByType = daoEvts.stream() - .collect(Collectors.groupingBy(e -> e.getType())); + Map> daoEvtsByType = daoEvts.stream() + .collect(Collectors.groupingBy(e -> e.getType(), Collectors.toSet())); fireTreeEvts(daoEvtsByType.get(DAOEvent.Type.TREE)); - List resultEvts = daoEvtsByType.get(DAOEvent.Type.RESULT); + Set resultEvts = daoEvtsByType.get(DAOEvent.Type.RESULT); if (immediateResultAction) { fireResultEvts(resultEvts); } else { @@ -308,24 +308,24 @@ public class MainDAO extends AbstractDAO { } private void handleEventFlush() { - Collection daoEvts = handleIngestComplete(); + Collection daoEvts = handleIngestComplete(); - Map> daoEvtsByType = daoEvts.stream() - .collect(Collectors.groupingBy(e -> e.getType())); + Map> daoEvtsByType = daoEvts.stream() + .collect(Collectors.groupingBy(e -> e.getType(), Collectors.toSet())); fireTreeEvts(daoEvtsByType.get(DAOEvent.Type.TREE)); - List resultEvts = daoEvtsByType.get(DAOEvent.Type.RESULT); + Set resultEvts = daoEvtsByType.get(DAOEvent.Type.RESULT); 
fireResultEvts(resultEvts); } - private void fireResultEvts(Collection resultEvts) { + private void fireResultEvts(Set resultEvts) { if (CollectionUtils.isNotEmpty(resultEvts)) { resultEventsManager.firePropertyChange("DATA_CHANGE", null, new DAOAggregateEvent(resultEvts)); } } - private void fireTreeEvts(Collection treeEvts) { + private void fireTreeEvts(Set treeEvts) { if (CollectionUtils.isNotEmpty(treeEvts)) { treeEventsManager.firePropertyChange("TREE_CHANGE", null, new DAOAggregateEvent(treeEvts)); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java index 10237412a1..78020b4dbf 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java @@ -186,26 +186,26 @@ public class OsAccountsDAO extends AbstractDAO { } @Override - Collection handleIngestComplete() { + Set handleIngestComplete() { // GVDTODO - return Collections.emptyList(); + return Collections.emptySet(); } @Override - Collection shouldRefreshTree() { + Set shouldRefreshTree() { // GVDTODO - return Collections.emptyList(); + return Collections.emptySet(); } @Override - List processEvent(PropertyChangeEvent evt) { + Set processEvent(PropertyChangeEvent evt) { if (!OS_EVENTS.contains(evt.getPropertyName())) { - return Collections.emptyList(); + return Collections.emptySet(); } this.searchParamsCache.invalidateAll(); - return Collections.singletonList(new OsAccountEvent()); + return Collections.singleton(new OsAccountEvent()); } /** diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java index 800c2e09ed..37c23ef375 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TagsDAO.java @@ -314,19 +314,19 @@ public class TagsDAO extends AbstractDAO { } @Override - 
Collection handleIngestComplete() { + Set handleIngestComplete() { // GVDTODO - return Collections.emptyList(); + return Collections.emptySet(); } @Override - Collection shouldRefreshTree() { + Set shouldRefreshTree() { // GVDTODO - return Collections.emptyList(); + return Collections.emptySet(); } @Override - List processEvent(PropertyChangeEvent evt) { + Set processEvent(PropertyChangeEvent evt) { // GVDTODO this may be rewritten simpler now that it isn't processing a list of events Map, Set>> mapping = new HashMap<>(); @@ -340,7 +340,7 @@ public class TagsDAO extends AbstractDAO { // don't continue if no mapping entries if (mapping.isEmpty()) { - return Collections.emptyList(); + return Collections.emptySet(); } ConcurrentMap, SearchResultsDTO> concurrentMap = this.searchParamsCache.asMap(); @@ -366,7 +366,7 @@ public class TagsDAO extends AbstractDAO { return entry.getValue().stream() .map((dsIdOpt) -> new TagsEvent(tagType, tagNameId, dsIdOpt.orElse(null))); }) - .collect(Collectors.toList()); + .collect(Collectors.toSet()); } /** diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java index 09d5d87cf1..f51edb61be 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java @@ -655,19 +655,19 @@ public class ViewsDAO extends AbstractDAO { } @Override - Collection handleIngestComplete() { + Set handleIngestComplete() { // GVDTODO - return Collections.emptyList(); + return Collections.emptySet(); } @Override - Collection shouldRefreshTree() { + Set shouldRefreshTree() { // GVDTODO - return Collections.emptyList(); + return Collections.emptySet(); } @Override - List processEvent(PropertyChangeEvent evt) { + Set processEvent(PropertyChangeEvent evt) { // GVDTODO maps may not be necessary now that this isn't processing a list of events. 
Map> fileExtensionDsMap = new HashMap<>(); Map>> mimeTypeDsMap = new HashMap<>(); @@ -675,7 +675,7 @@ public class ViewsDAO extends AbstractDAO { AbstractFile af = DAOEventUtils.getFileFromEvt(evt); if (af == null) { - return Collections.emptyList(); + return Collections.emptySet(); } // create an extension mapping if extension present @@ -707,7 +707,7 @@ public class ViewsDAO extends AbstractDAO { } if (fileExtensionDsMap.isEmpty() && mimeTypeDsMap.isEmpty() && fileSizeDsMap.isEmpty()) { - return Collections.emptyList(); + return Collections.emptySet(); } clearRelevantCacheEntries(fileExtensionDsMap, mimeTypeDsMap, fileSizeDsMap); @@ -728,14 +728,14 @@ public class ViewsDAO extends AbstractDAO { * * @return The list of affected dao events. */ - private List getDAOEvents(Map> fileExtensionDsMap, + private Set getDAOEvents(Map> fileExtensionDsMap, Map>> mimeTypeDsMap, Map> fileSizeDsMap) { Stream fileExtStream = fileExtensionDsMap.entrySet().stream() .flatMap(entry -> entry.getValue().stream().map(dsId -> new FileTypeExtensionsEvent(entry.getKey(), dsId))); - List fileMimeList = new ArrayList<>(); + Set fileMimeList = new HashSet<>(); for (Entry>> prefixEntry : mimeTypeDsMap.entrySet()) { String mimePrefix = prefixEntry.getKey(); for (Entry> suffixEntry : prefixEntry.getValue().entrySet()) { @@ -752,7 +752,7 @@ public class ViewsDAO extends AbstractDAO { return Stream.of(fileExtStream, fileMimeList.stream(), fileSizeStream) .flatMap(stream -> stream) - .collect(Collectors.toList()); + .collect(Collectors.toSet()); } /** diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOAggregateEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOAggregateEvent.java index f00832c922..d8930e37ca 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOAggregateEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOAggregateEvent.java @@ -18,29 +18,29 @@ */ package org.sleuthkit.autopsy.mainui.datamodel.events; 
-import java.util.Collection; import java.util.Collections; +import java.util.Set; /** * A single event containing an aggregate of all affected data. */ public class DAOAggregateEvent { - private final Collection objects; + private final Set objects; /** * Main constructor. * * @param objects The list of events in this aggregate event. */ - public DAOAggregateEvent(Collection objects) { - this.objects = Collections.unmodifiableCollection(objects); + public DAOAggregateEvent(Set objects) { + this.objects = Collections.unmodifiableSet(objects); } /** * @return The events in this aggregate event. */ - public Collection getEvents() { + public Set getEvents() { return objects; } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventBatcher.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventBatcher.java index 034dd59243..33789cb007 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventBatcher.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventBatcher.java @@ -43,7 +43,7 @@ public class DAOEventBatcher { * * @param events The events to handle. */ - void handle(Collection events); + void handle(Set events); } private final ScheduledThreadPoolExecutor refreshExecutor @@ -105,9 +105,9 @@ public class DAOEventBatcher { * * @return The flushed events. 
*/ - public Collection flushEvents() { + public Set flushEvents() { synchronized (this.eventListLock) { - Collection evtsToFire = this.aggregateEvents; + Set evtsToFire = this.aggregateEvents; this.aggregateEvents = new HashSet<>(); this.isRunning = false; return evtsToFire; From 7e7c6e90291db578d292637f58876a17fa9a1fd5 Mon Sep 17 00:00:00 2001 From: Brian Carrier Date: Wed, 24 Nov 2021 15:41:37 -0500 Subject: [PATCH 095/142] Update TreeChildFactory.java --- .../mainui/nodes/TreeChildFactory.java | 23 +++++++++++++------ 1 file changed, 16 insertions(+), 7 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java index 990c2ac2f3..b0c471a22c 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java @@ -40,7 +40,7 @@ import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; /** - * Factory for populating tree with results. + * Factory for populating child nodes in a tree based on TreeResultsDTO */ public abstract class TreeChildFactory extends ChildFactory.Detachable implements Comparator { @@ -68,17 +68,26 @@ public abstract class TreeChildFactory extends ChildFactory.Detachable> typeNodeMap = new MapMaker().weakValues().makeMap(); private final Object resultsUpdateLock = new Object(); + // Results of the last full load from the DAO. May not be complete because + // events will come in with more updated data. private TreeResultsDTO curResults = null; + + // All current child items (sorted). May have more items than curResults does because + // this is updated based on events and new data. 
private List> curItemsList = new ArrayList<>(); + + // maps the Node key (ID) to its DTO private Map> idMapping = new HashMap<>(); @Override protected boolean createKeys(List toPopulate) { List> itemsList; synchronized (resultsUpdateLock) { + // Load data from DAO if we haven't already if (curResults == null) { try { updateData(); @@ -116,9 +125,9 @@ public abstract class TreeChildFactory extends ChildFactory.Detachable item) { TreeNode cachedTreeNode = this.typeNodeMap.get(item.getId()); @@ -143,7 +152,7 @@ public abstract class TreeChildFactory extends ChildFactory.Detachable extends ChildFactory.Detachable extends ChildFactory.Detachable Date: Wed, 24 Nov 2021 16:27:08 -0500 Subject: [PATCH 096/142] Update SearchResultsDTO.java --- .../autopsy/mainui/datamodel/SearchResultsDTO.java | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/SearchResultsDTO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/SearchResultsDTO.java index 3d723265df..8346c2e1b7 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/SearchResultsDTO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/SearchResultsDTO.java @@ -21,22 +21,28 @@ package org.sleuthkit.autopsy.mainui.datamodel; import java.util.List; /** - * - * @author gregd + * Interface for all search results that are used to display in the table/DataResultViewer area. */ public interface SearchResultsDTO { + // returns the type of data String getTypeId(); + + // Returns a unique signature for the type of data. Used keep track of custom column ordering. + String getSignature(); + // Text to display at top of the table about the type of the results. String getDisplayName(); + // Sorted list of column headers. 
The RowDTO column values will be in the same order List getColumns(); + // Page-sized, sorted list of rows to display List getItems(); + // total number of results (could be bigger than what is in the results) long getTotalResultsCount(); + // Index in the total results that this set/page starts at long getStartItem(); - - String getSignature(); } From f6236ad445591498c61c946c026d5e265f251097 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Mon, 29 Nov 2021 08:21:02 -0500 Subject: [PATCH 097/142] updates --- .../mainui/datamodel/FileSystemDAO.java | 2 +- .../mainui/datamodel/TreeResultsDTO.java | 35 +++++++------- .../autopsy/mainui/datamodel/ViewsDAO.java | 4 +- .../datamodel/events/DAOEventUtils.java | 6 +-- .../nodes/AnalysisResultTypeFactory.java | 46 +++++++++---------- .../mainui/nodes/DataArtifactTypeFactory.java | 14 +++--- .../mainui/nodes/TreeChildFactory.java | 36 +++++++++++---- .../autopsy/mainui/nodes/TreeNode.java | 2 +- .../mainui/nodes/ViewsTypeFactory.java | 30 ++++++------ 9 files changed, 95 insertions(+), 80 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java index bb08d7b6af..21ac06ffe5 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java @@ -376,7 +376,7 @@ public class FileSystemDAO extends AbstractDAO { Set affectedParentContent = new HashSet<>(); boolean refreshAllContent = false; - Content content = DAOEventUtils.getDerivedFileContentFromEvt(evt); + Content content = DAOEventUtils.getDerivedFileContentFromFileEvent(evt); if (content != null) { Content parentContent; try { diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java index f3d5d09a7c..947bff365e 100644 --- 
a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java @@ -118,8 +118,7 @@ public class TreeResultsDTO { } return true; } - - + } /** @@ -131,28 +130,27 @@ public class TreeResultsDTO { private final String displayName; private final String typeId; private final TreeDisplayCount count; - private final T typeData; + private final T searchParams; private final Object id; /** * Main constructor. * - * @param typeId The id of this item type. - * @param typeData Data for this particular row's type (i.e. - * BlackboardArtifact.Type for counts of a particular - * artifact type). - * @param id The id of this row. Can be any object that - * implements equals and hashCode. - * @param displayName The display name of this row. - * @param count The count of results for this row or null if not - * applicable. + * @param typeId The id of this item type. + * @param searchParams Search params for this tree item that can be used + * to display results. + * @param id The id of this row. Can be any object that + * implements equals and hashCode. + * @param displayName The display name of this row. + * @param count The count of results for this row or null if not + * applicable. */ - public TreeItemDTO(String typeId, T typeData, Object id, String displayName, TreeDisplayCount count) { + public TreeItemDTO(String typeId, T searchParams, Object id, String displayName, TreeDisplayCount count) { this.typeId = typeId; this.id = id; this.displayName = displayName; this.count = count; - this.typeData = typeData; + this.searchParams = searchParams; } /** @@ -171,12 +169,11 @@ public class TreeResultsDTO { /** * - * @return Data for this particular row's type (i.e. - * BlackboardArtifact.Type for counts of a particular artifact - * type). + * @return Search params for this tree item that can be used to display + * results. 
*/ - public T getTypeData() { - return typeData; + public T getSearchParams() { + return searchParams; } /** diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java index f51edb61be..b1f48e07d2 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java @@ -471,7 +471,7 @@ public class ViewsDAO extends AbstractDAO { name, TreeDisplayCount.getDeterminate(entry.getValue())); }) - .sorted((a, b) -> stringCompare(a.getTypeData().getMimeType(), b.getTypeData().getMimeType())) + .sorted((a, b) -> stringCompare(a.getSearchParams().getMimeType(), b.getSearchParams().getMimeType())) .collect(Collectors.toList()); return new TreeResultsDTO<>(treeList); @@ -673,7 +673,7 @@ public class ViewsDAO extends AbstractDAO { Map>> mimeTypeDsMap = new HashMap<>(); Map> fileSizeDsMap = new HashMap<>(); - AbstractFile af = DAOEventUtils.getFileFromEvt(evt); + AbstractFile af = DAOEventUtils.getFileFromFileEvent(evt); if (af == null) { return Collections.emptySet(); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventUtils.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventUtils.java index c70b0ff34c..d6789f7b36 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventUtils.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/DAOEventUtils.java @@ -41,7 +41,7 @@ public class DAOEventUtils { */ public static Content getContentFromFileEvent(PropertyChangeEvent evt) { String eventName = evt.getPropertyName(); - Content derivedContent = getDerivedFileContentFromEvt(evt); + Content derivedContent = getDerivedFileContentFromFileEvent(evt); if (derivedContent != null) { return derivedContent; } else if (IngestManager.IngestModuleEvent.FILE_DONE.toString().equals(eventName) @@ -60,7 +60,7 @@ public class DAOEventUtils { * * @return The inner content or null 
if no content. */ - public static Content getDerivedFileContentFromEvt(PropertyChangeEvent evt) { + public static Content getDerivedFileContentFromFileEvent(PropertyChangeEvent evt) { String eventName = evt.getPropertyName(); if (IngestManager.IngestModuleEvent.CONTENT_CHANGED.toString().equals(eventName) && (evt.getOldValue() instanceof ModuleContentEvent) @@ -80,7 +80,7 @@ public class DAOEventUtils { * * @return The inner file or null if no file found. */ - public static AbstractFile getFileFromEvt(PropertyChangeEvent evt) { + public static AbstractFile getFileFromFileEvent(PropertyChangeEvent evt) { Content content = getContentFromFileEvent(evt); return (content instanceof AbstractFile) ? ((AbstractFile) content) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultTypeFactory.java index 3b58de71d7..6c17eb5daf 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultTypeFactory.java @@ -82,9 +82,9 @@ public class AnalysisResultTypeFactory extends TreeChildFactory createNewNode(TreeResultsDTO.TreeItemDTO rowData) { - if (SET_TREE_ARTIFACTS.contains(rowData.getTypeData().getArtifactType().getTypeID())) { - return new TreeTypeNode(rowData, new TreeSetFactory(rowData.getTypeData().getArtifactType(), dataSourceId, null)); - } else if (BlackboardArtifact.Type.TSK_KEYWORD_HIT.equals(rowData.getTypeData().getArtifactType())) { + if (SET_TREE_ARTIFACTS.contains(rowData.getSearchParams().getArtifactType().getTypeID())) { + return new TreeTypeNode(rowData, new TreeSetFactory(rowData.getSearchParams().getArtifactType(), dataSourceId, null)); + } else if (BlackboardArtifact.Type.TSK_KEYWORD_HIT.equals(rowData.getSearchParams().getArtifactType())) { return new TreeTypeNode(rowData, new KeywordSetFactory(dataSourceId)); } else { return new AnalysisResultTypeTreeNode(rowData); @@ -92,7 
+92,7 @@ public class AnalysisResultTypeFactory extends TreeChildFactory getInvalidatedChild(TreeEvent daoEvt) { + protected TreeResultsDTO.TreeItemDTO getOrCreateRelevantChild(TreeEvent daoEvt) { // GVDTODO return null; } @@ -113,14 +113,14 @@ public class AnalysisResultTypeFactory extends TreeChildFactory itemData) { - super(itemData.getTypeData().getArtifactType().getTypeName(), - getIconPath(itemData.getTypeData().getArtifactType()), + super(itemData.getSearchParams().getArtifactType().getTypeName(), + getIconPath(itemData.getSearchParams().getArtifactType()), itemData); } @Override public void respondSelection(DataResultTopComponent dataResultPanel) { - dataResultPanel.displayAnalysisResult(this.getItemData().getTypeData()); + dataResultPanel.displayAnalysisResult(this.getItemData().getSearchParams()); } } @@ -135,8 +135,8 @@ public class AnalysisResultTypeFactory extends TreeChildFactory itemData, ChildFactory childFactory) { - super(itemData.getTypeData().getArtifactType().getTypeName(), - getIconPath(itemData.getTypeData().getArtifactType()), + super(itemData.getSearchParams().getArtifactType().getTypeName(), + getIconPath(itemData.getSearchParams().getArtifactType()), itemData, Children.create(childFactory, true), getDefaultLookup(itemData)); @@ -179,7 +179,7 @@ public class AnalysisResultTypeFactory extends TreeChildFactory getInvalidatedChild(TreeEvent daoEvt) { + protected TreeResultsDTO.TreeItemDTO getOrCreateRelevantChild(TreeEvent daoEvt) { // GVDTODO return null; } @@ -201,8 +201,8 @@ public class AnalysisResultTypeFactory extends TreeChildFactory itemData) { - super(itemData.getTypeData().getArtifactType().getTypeName(), - getIconPath(itemData.getTypeData().getArtifactType()), + super(itemData.getSearchParams().getArtifactType().getTypeName(), + getIconPath(itemData.getSearchParams().getArtifactType()), itemData, Children.LEAF, getDefaultLookup(itemData)); @@ -210,7 +210,7 @@ public class AnalysisResultTypeFactory extends TreeChildFactory itemData) 
{ - super(itemData.getTypeData().getArtifactType().getTypeName(), - getIconPath(itemData.getTypeData().getArtifactType()), + super(itemData.getSearchParams().getArtifactType().getTypeName(), + getIconPath(itemData.getSearchParams().getArtifactType()), itemData, - Children.create(new KeywordSearchTermFactory(itemData.getTypeData()), true), + Children.create(new KeywordSearchTermFactory(itemData.getSearchParams()), true), getDefaultLookup(itemData)); } } @@ -276,7 +276,7 @@ public class AnalysisResultTypeFactory extends TreeChildFactory getInvalidatedChild(TreeEvent daoEvt) { + protected TreeResultsDTO.TreeItemDTO getOrCreateRelevantChild(TreeEvent daoEvt) { // GVDTODO return null; } @@ -299,16 +299,16 @@ public class AnalysisResultTypeFactory extends TreeChildFactory itemData) { - super(itemData.getTypeData().getSearchTerm(), + super(itemData.getSearchParams().getSearchTerm(), getIconPath(BlackboardArtifact.Type.TSK_KEYWORD_HIT), itemData, - itemData.getTypeData().hasChildren() ? Children.create(new KeywordFoundMatchFactory(itemData.getTypeData()), true) : Children.LEAF, + itemData.getSearchParams().hasChildren() ? 
Children.create(new KeywordFoundMatchFactory(itemData.getSearchParams()), true) : Children.LEAF, getDefaultLookup(itemData)); } @Override public void respondSelection(DataResultTopComponent dataResultPanel) { - KeywordSearchTermParams searchParams = this.getItemData().getTypeData(); + KeywordSearchTermParams searchParams = this.getItemData().getSearchParams(); if (!searchParams.hasChildren()) { dataResultPanel.displayKeywordHits( @@ -356,7 +356,7 @@ public class AnalysisResultTypeFactory extends TreeChildFactory getInvalidatedChild(TreeEvent daoEvt) { + protected TreeResultsDTO.TreeItemDTO getOrCreateRelevantChild(TreeEvent daoEvt) { // GVDTODO return null; } @@ -379,7 +379,7 @@ public class AnalysisResultTypeFactory extends TreeChildFactory itemData) { - super(itemData.getTypeData().getKeywordMatch(), + super(itemData.getSearchParams().getKeywordMatch(), getIconPath(BlackboardArtifact.Type.TSK_KEYWORD_HIT), itemData, Children.LEAF, @@ -388,7 +388,7 @@ public class AnalysisResultTypeFactory extends TreeChildFactory getInvalidatedChild(TreeEvent daoEvt) { - if (daoEvt.getItemRecord().getTypeData() instanceof DataArtifactSearchParam) { + protected TreeItemDTO getOrCreateRelevantChild(TreeEvent daoEvt) { + if (daoEvt.getItemRecord().getSearchParams() instanceof DataArtifactSearchParam) { @SuppressWarnings("unchecked") TreeItemDTO originalTreeItem = (TreeItemDTO) daoEvt.getItemRecord(); - DataArtifactSearchParam searchParam = originalTreeItem.getTypeData(); + DataArtifactSearchParam searchParam = originalTreeItem.getSearchParams(); if (this.dataSourceId == null || this.dataSourceId == searchParam.getDataSourceId()) { - return TreeChildFactory.getUpdatedTreeData(originalTreeItem, new DataArtifactSearchParam(searchParam.getArtifactType(), searchParam.getDataSourceId())); + return TreeChildFactory.createTreeItemDTO(originalTreeItem, new DataArtifactSearchParam(searchParam.getArtifactType(), searchParam.getDataSourceId())); } } return null; @@ -83,14 +83,14 @@ public class 
DataArtifactTypeFactory extends TreeChildFactory itemData) { - super(itemData.getTypeData().getArtifactType().getTypeName(), - getIconPath(itemData.getTypeData().getArtifactType()), + super(itemData.getSearchParams().getArtifactType().getTypeName(), + getIconPath(itemData.getSearchParams().getArtifactType()), itemData); } @Override public void respondSelection(DataResultTopComponent dataResultPanel) { - dataResultPanel.displayDataArtifact(this.getItemData().getTypeData()); + dataResultPanel.displayDataArtifact(this.getItemData().getSearchParams()); } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java index b0c471a22c..ed3dc1aafc 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java @@ -52,7 +52,7 @@ public abstract class TreeChildFactory extends ChildFactory.Detachable item = getInvalidatedChild(treeEvt); + TreeItemDTO item = getOrCreateRelevantChild(treeEvt); if (item != null) { if (treeEvt.isRefreshRequired()) { update(); @@ -75,11 +75,11 @@ public abstract class TreeChildFactory extends ChildFactory.Detachable curResults = null; - + // All current child items (sorted). May have more items than curResults does because // this is updated based on events and new data. private List> curItemsList = new ArrayList<>(); - + // maps the Node key (ID) to its DTO private Map> idMapping = new HashMap<>(); @@ -125,7 +125,7 @@ public abstract class TreeChildFactory extends ChildFactory.Detachable extends ChildFactory.Detachable extends ChildFactory.Detachable extends ChildFactory.Detachable TreeItemDTO getUpdatedTreeData(TreeItemDTO original, T updatedData) { + + /** + * A utility method that creates a TreeItemDTO using the data in 'original' + * for all fields except 'typeData' where 'updatedData' is used instead. + * + * @param original The original tree item dto. 
+ * @param updatedData The new type data to use. + * + * @return The created tree item dto. + */ + static TreeItemDTO createTreeItemDTO(TreeItemDTO original, T updatedData) { return new TreeItemDTO<>( original.getTypeId(), updatedData, @@ -261,5 +270,14 @@ public abstract class TreeChildFactory extends ChildFactory.Detachable getChildResults() throws IllegalArgumentException, ExecutionException; - protected abstract TreeItemDTO getInvalidatedChild(TreeEvent daoEvt); + /** + * Creates a child tree item dto that can be used to find the affected child + * node that requires updates. + * + * @param treeEvt The tree event. + * + * @return The tree item dto that can be used to find the child node + * affected by the tree event. + */ + protected abstract TreeItemDTO getOrCreateRelevantChild(TreeEvent treeEvt); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeNode.java index 70eb72ae08..a5ad30d4c2 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeNode.java @@ -45,7 +45,7 @@ public abstract class TreeNode extends AbstractNode implements SelectionRespo * @return The lookup to use in the node. 
*/ protected static Lookup getDefaultLookup(TreeItemDTO itemData) { - return Lookups.fixed(itemData, itemData.getTypeData()); + return Lookups.fixed(itemData, itemData.getSearchParams()); } private TreeItemDTO itemData; diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java index 21cc369db5..c4ddbf366e 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java @@ -74,7 +74,7 @@ public class ViewsTypeFactory { } @Override - protected TreeResultsDTO.TreeItemDTO getInvalidatedChild(TreeEvent daoEvt) { + protected TreeResultsDTO.TreeItemDTO getOrCreateRelevantChild(TreeEvent daoEvt) { // GVDTODO return null; } @@ -95,12 +95,12 @@ public class ViewsTypeFactory { * @param itemData The data for the node. */ FileSizeTypeNode(TreeResultsDTO.TreeItemDTO itemData) { - super("FILE_SIZE_" + itemData.getTypeData().getSizeFilter().getName(), "org/sleuthkit/autopsy/images/file-size-16.png", itemData); + super("FILE_SIZE_" + itemData.getSearchParams().getSizeFilter().getName(), "org/sleuthkit/autopsy/images/file-size-16.png", itemData); } @Override public void respondSelection(DataResultTopComponent dataResultPanel) { - dataResultPanel.displayFileSizes(this.getItemData().getTypeData()); + dataResultPanel.displayFileSizes(this.getItemData().getSearchParams()); } } @@ -133,7 +133,7 @@ public class ViewsTypeFactory { } @Override - protected TreeResultsDTO.TreeItemDTO getInvalidatedChild(TreeEvent daoEvt) { + protected TreeResultsDTO.TreeItemDTO getOrCreateRelevantChild(TreeEvent daoEvt) { // GVDTODO return null; } @@ -152,10 +152,10 @@ public class ViewsTypeFactory { */ public FileMimePrefixNode(TreeResultsDTO.TreeItemDTO itemData) { super( - "FILE_MIME_" + itemData.getTypeData().getMimeType(), + "FILE_MIME_" + itemData.getSearchParams().getMimeType(), "org/sleuthkit/autopsy/images/file_types.png", 
itemData, - Children.create(new FileMimeSuffixFactory(itemData.getTypeData().getDataSourceId(), itemData.getTypeData().getMimeType()), true), + Children.create(new FileMimeSuffixFactory(itemData.getSearchParams().getDataSourceId(), itemData.getSearchParams().getMimeType()), true), getDefaultLookup(itemData)); } } @@ -193,7 +193,7 @@ public class ViewsTypeFactory { } @Override - protected TreeResultsDTO.TreeItemDTO getInvalidatedChild(TreeEvent daoEvt) { + protected TreeResultsDTO.TreeItemDTO getOrCreateRelevantChild(TreeEvent daoEvt) { // GVDTODO return null; } @@ -215,14 +215,14 @@ public class ViewsTypeFactory { * @param itemData The data for the node. */ public FileMimeSuffixNode(TreeResultsDTO.TreeItemDTO itemData) { - super("FILE_MIME_" + itemData.getTypeData().getMimeType(), + super("FILE_MIME_" + itemData.getSearchParams().getMimeType(), "org/sleuthkit/autopsy/images/file-filter-icon.png", itemData); } @Override public void respondSelection(DataResultTopComponent dataResultPanel) { - dataResultPanel.displayFileMimes(this.getItemData().getTypeData()); + dataResultPanel.displayFileMimes(this.getItemData().getSearchParams()); } } @@ -261,9 +261,9 @@ public class ViewsTypeFactory { @Override protected TreeNode createNewNode(TreeResultsDTO.TreeItemDTO rowData) { Collection childFilters; - if (rowData.getTypeData().getFilter() == FileExtRootFilter.TSK_DOCUMENT_FILTER) { + if (rowData.getSearchParams().getFilter() == FileExtRootFilter.TSK_DOCUMENT_FILTER) { childFilters = Stream.of(FileExtDocumentFilter.values()).collect(Collectors.toList()); - } else if (rowData.getTypeData().getFilter() == FileExtRootFilter.TSK_EXECUTABLE_FILTER) { + } else if (rowData.getSearchParams().getFilter() == FileExtRootFilter.TSK_EXECUTABLE_FILTER) { childFilters = Stream.of(FileExtExecutableFilter.values()).collect(Collectors.toList()); } else { childFilters = null; @@ -278,7 +278,7 @@ public class ViewsTypeFactory { } @Override - protected TreeResultsDTO.TreeItemDTO 
getInvalidatedChild(TreeEvent daoEvt) { + protected TreeResultsDTO.TreeItemDTO getOrCreateRelevantChild(TreeEvent daoEvt) { //GVDTODO return null; } @@ -304,10 +304,10 @@ public class ViewsTypeFactory { * children of this node. */ public FileExtNode(TreeResultsDTO.TreeItemDTO itemData, Collection childFilters) { - super("FILE_EXT_" + itemData.getTypeData().getFilter().getName(), + super("FILE_EXT_" + itemData.getSearchParams().getFilter().getName(), childFilters == null ? "org/sleuthkit/autopsy/images/file-filter-icon.png" : "org/sleuthkit/autopsy/images/file_types.png", itemData, - childFilters == null ? Children.LEAF : Children.create(new FileExtFactory(itemData.getTypeData().getDataSourceId(), childFilters), true), + childFilters == null ? Children.LEAF : Children.create(new FileExtFactory(itemData.getSearchParams().getDataSourceId(), childFilters), true), getDefaultLookup(itemData)); this.childFilters = childFilters; @@ -316,7 +316,7 @@ public class ViewsTypeFactory { @Override public void respondSelection(DataResultTopComponent dataResultPanel) { if (childFilters == null) { - dataResultPanel.displayFileExtensions(this.getItemData().getTypeData()); + dataResultPanel.displayFileExtensions(this.getItemData().getSearchParams()); } else { super.respondSelection(dataResultPanel); } From 8b7fc53c0c3b05e3bc3df5aa7f9a4a32d32e57e9 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Mon, 29 Nov 2021 11:08:37 -0500 Subject: [PATCH 098/142] updates --- .../autopsy/mainui/datamodel/AnalysisResultDAO.java | 7 +++++-- .../autopsy/mainui/datamodel/DataArtifactDAO.java | 10 +++++----- .../autopsy/mainui/datamodel/TreeResultsDTO.java | 4 +++- 3 files changed, 13 insertions(+), 8 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java index 3c034d66fa..2a5e03e103 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java +++ 
b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java @@ -856,9 +856,12 @@ public class AnalysisResultDAO extends BlackboardArtifactDAO { AnalysisResultSetEvent setEvt = (AnalysisResultSetEvent) arEvt; return new TreeEvent(getSetTreeItem(setEvt.getArtifactType(), setEvt.getDataSourceId(), setEvt.getSetName(), setEvt.getSetName() == null ? "" : setEvt.getSetName(), - TreeDisplayCount.INDETERMINATE), shouldRefresh); + shouldRefresh ? TreeDisplayCount.UNSPECIFIED : TreeDisplayCount.INDETERMINATE), + shouldRefresh); } else { - return new TreeEvent(getTreeItem(arEvt.getArtifactType(), arEvt.getDataSourceId(), TreeDisplayCount.INDETERMINATE), shouldRefresh); + return new TreeEvent(getTreeItem(arEvt.getArtifactType(), arEvt.getDataSourceId(), + shouldRefresh ? TreeDisplayCount.UNSPECIFIED : TreeDisplayCount.INDETERMINATE), + shouldRefresh); } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index 9b79a0fe29..b56ba3501b 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -158,7 +158,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { Map typeCounts = getCounts(BlackboardArtifact.Category.DATA_ARTIFACT, dataSourceId); List> treeItemRows = typeCounts.entrySet().stream() .map(entry -> { - return getTreeItem(entry.getKey(), dataSourceId, + return createTreeItem(entry.getKey(), dataSourceId, indeterminateTypes.contains(entry.getKey()) ? 
TreeDisplayCount.INDETERMINATE : TreeDisplayCount.getDeterminate(entry.getValue())); @@ -230,7 +230,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { } List newTreeEvents = this.treeCounts.enqueueAll(dataArtifactEvents).stream() - .map(daoEvt -> new TreeEvent(getTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.INDETERMINATE), false)) + .map(daoEvt -> new TreeEvent(createTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.INDETERMINATE), false)) .collect(Collectors.toList()); return Stream.of(dataArtifactEvents, newTreeEvents) @@ -238,7 +238,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { .collect(Collectors.toSet()); } - private TreeItemDTO getTreeItem(BlackboardArtifact.Type artifactType, Long dataSourceId, TreeDisplayCount displayCount) { + private TreeItemDTO createTreeItem(BlackboardArtifact.Type artifactType, Long dataSourceId, TreeDisplayCount displayCount) { return new TreeResultsDTO.TreeItemDTO<>( BlackboardArtifact.Category.DATA_ARTIFACT.name(), new DataArtifactSearchParam(artifactType, dataSourceId), @@ -250,14 +250,14 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { @Override Set handleIngestComplete() { return this.treeCounts.flushEvents().stream() - .map(daoEvt -> new TreeEvent(getTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.INDETERMINATE), true)) + .map(daoEvt -> new TreeEvent(createTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.UNSPECIFIED), true)) .collect(Collectors.toSet()); } @Override Set shouldRefreshTree() { return this.treeCounts.getEventTimeouts().stream() - .map(daoEvt -> new TreeEvent(getTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.INDETERMINATE), true)) + .map(daoEvt -> new TreeEvent(createTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.UNSPECIFIED), true)) .collect(Collectors.toSet()); } diff --git 
a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java index 947bff365e..00a2f02b88 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/TreeResultsDTO.java @@ -52,7 +52,8 @@ public class TreeResultsDTO { public enum Type { DETERMINATE, INDETERMINATE, - NOT_SHOWN + NOT_SHOWN, + UNSPECIFIED } private final Type type; @@ -60,6 +61,7 @@ public class TreeResultsDTO { public static final TreeDisplayCount INDETERMINATE = new TreeDisplayCount(Type.INDETERMINATE, -1); public static final TreeDisplayCount NOT_SHOWN = new TreeDisplayCount(Type.NOT_SHOWN, -1); + public static final TreeDisplayCount UNSPECIFIED = new TreeDisplayCount(Type.UNSPECIFIED, -1); public static TreeDisplayCount getDeterminate(long count) { return new TreeDisplayCount(Type.DETERMINATE, count); From 817bbdd093434a642f02b86be69c7a4409f4051d Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Mon, 29 Nov 2021 12:56:07 -0500 Subject: [PATCH 099/142] 8202 ingest prog bars on EDT; job cancel off EDT --- .../autopsy/ingest/IngestJobExecutor.java | 322 +++++++++--------- .../keywordsearch/IngestSearchRunner.java | 49 +-- 2 files changed, 197 insertions(+), 174 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java index 7a93b15b22..b3691903c1 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java @@ -33,6 +33,7 @@ import java.util.regex.Pattern; import java.util.stream.Stream; import javax.annotation.concurrent.GuardedBy; import javax.swing.JOptionPane; +import javax.swing.SwingUtilities; import org.netbeans.api.progress.ProgressHandle; import org.openide.util.Cancellable; import org.openide.util.NbBundle; @@ -41,6 +42,7 @@ import 
org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.core.RuntimeProperties; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.NetworkUtils; +import org.sleuthkit.autopsy.coreutils.ThreadConfined; import org.sleuthkit.autopsy.ingest.IngestTasksScheduler.IngestJobTasksSnapshot; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Content; @@ -190,21 +192,26 @@ final class IngestJobExecutor { /* * If running in the NetBeans thick client application version of Autopsy, - * NetBeans progress bars are used to display ingest job progress in the - * lower right hand corner of the main application window. A layer of - * abstraction to allow alternate representations of progress could be used - * here, as it is in other places in the application, to better decouple - * this object from the application's presentation layer. + * NetBeans progress handles (i.e., progress bars) are used to display + * ingest job progress in the lower right hand corner of the main + * application window. + * + * A layer of abstraction to allow alternate representations of progress + * could be used here, as it is in other places in the application (see + * implementations and usage of + * org.sleuthkit.autopsy.progress.ProgressIndicator interface), to better + * decouple this object from the application's presentation layer. 
*/ + private volatile long estimatedFilesToProcess; + private volatile long processedFiles; private final boolean usingNetBeansGUI; - private final Object dataSourceIngestProgressLock = new Object(); - private ProgressHandle dataSourceIngestProgressBar; - private final Object fileIngestProgressLock = new Object(); + @ThreadConfined(type = ThreadConfined.ThreadType.AWT) private final List filesInProgress = new ArrayList<>(); - private long estimatedFilesToProcess; - private long processedFiles; + @ThreadConfined(type = ThreadConfined.ThreadType.AWT) + private ProgressHandle dataSourceIngestProgressBar; + @ThreadConfined(type = ThreadConfined.ThreadType.AWT) private ProgressHandle fileIngestProgressBar; - private final Object artifactIngestProgressLock = new Object(); + @ThreadConfined(type = ThreadConfined.ThreadType.AWT) private ProgressHandle artifactIngestProgressBar; /* @@ -674,14 +681,10 @@ final class IngestJobExecutor { * how many files remain to be analyzed as each file ingest task * is completed. */ - long filesToProcess; if (files.isEmpty()) { - filesToProcess = dataSource.accept(new GetFilesCountVisitor()); + estimatedFilesToProcess = dataSource.accept(new GetFilesCountVisitor()); } else { - filesToProcess = files.size(); - } - synchronized (fileIngestProgressLock) { - estimatedFilesToProcess = filesToProcess; + estimatedFilesToProcess = files.size(); } } @@ -781,17 +784,21 @@ final class IngestJobExecutor { if (hasFileIngestModules()) { /* - * Do a count of the files the data source processor has added - * to the case database. This number will be used to estimate - * how many files remain to be analyzed as each file ingest task - * is completed. + * For ingest job progress reporting purposes, do a count of the + * files the data source processor has added to the case + * database. This number will be used to estimate how many files + * remain to be analyzed as each file ingest task is completed. 
+ * The estimate will likely be an over-estimate, since some of + * the files will have already been "streamed" to this job and + * processed. */ - long filesToProcess = dataSource.accept(new GetFilesCountVisitor()); - synchronized (fileIngestProgressLock) { - estimatedFilesToProcess = filesToProcess; - if (usingNetBeansGUI && fileIngestProgressBar != null) { - fileIngestProgressBar.switchToDeterminate((int) estimatedFilesToProcess); - } + estimatedFilesToProcess = dataSource.accept(new GetFilesCountVisitor()); + if (usingNetBeansGUI) { + SwingUtilities.invokeLater(() -> { + if (fileIngestProgressBar != null) { + fileIngestProgressBar.switchToDeterminate((int) estimatedFilesToProcess); + } + }); } } @@ -852,18 +859,20 @@ final class IngestJobExecutor { */ private void startArtifactIngestProgressBar() { if (usingNetBeansGUI) { - synchronized (artifactIngestProgressLock) { + SwingUtilities.invokeLater(() -> { String displayName = NbBundle.getMessage(this.getClass(), "IngestJob.progress.dataArtifactIngest.displayName", this.dataSource.getName()); artifactIngestProgressBar = ProgressHandle.createHandle(displayName, new Cancellable() { @Override public boolean cancel() { - IngestJobExecutor.this.cancel(IngestJob.CancellationReason.USER_CANCELLED); + new Thread(() -> { + IngestJobExecutor.this.cancel(IngestJob.CancellationReason.USER_CANCELLED); + }).start(); return true; } }); artifactIngestProgressBar.start(); artifactIngestProgressBar.switchToIndeterminate(); - } + }); } } @@ -876,34 +885,57 @@ final class IngestJobExecutor { * cancellation occurs is NOT discarded. 
*/ private void startDataSourceIngestProgressBar() { - if (usingNetBeansGUI) { - synchronized (dataSourceIngestProgressLock) { - String displayName = NbBundle.getMessage(this.getClass(), "IngestJob.progress.dataSourceIngest.initialDisplayName", dataSource.getName()); - dataSourceIngestProgressBar = ProgressHandle.createHandle(displayName, new Cancellable() { - @Override - public boolean cancel() { - /* - * The user has already pressed the cancel button on - * this progress bar, and the OK button of a cancelation - * confirmation dialog supplied by NetBeans. Find out - * whether the user wants to cancel only the currently - * executing data source ingest module or the entire - * ingest job. - */ - DataSourceIngestCancellationPanel panel = new DataSourceIngestCancellationPanel(); - String dialogTitle = NbBundle.getMessage(IngestJobExecutor.this.getClass(), "IngestJob.cancellationDialog.title"); - JOptionPane.showConfirmDialog(WindowManager.getDefault().getMainWindow(), panel, dialogTitle, JOptionPane.OK_OPTION, JOptionPane.PLAIN_MESSAGE); - if (panel.cancelAllDataSourceIngestModules()) { + SwingUtilities.invokeLater(() -> { + String displayName = NbBundle.getMessage(this.getClass(), "IngestJob.progress.dataSourceIngest.initialDisplayName", dataSource.getName()); + dataSourceIngestProgressBar = ProgressHandle.createHandle(displayName, new Cancellable() { + @Override + public boolean cancel() { + /* + * The user has already pressed the cancel button on this + * progress bar, and the OK button of a cancelation + * confirmation dialog supplied by NetBeans. Find out + * whether the user wants to cancel only the currently + * executing data source ingest module or the entire ingest + * job. 
+ */ + DataSourceIngestCancellationPanel panel = new DataSourceIngestCancellationPanel(); + String dialogTitle = NbBundle.getMessage(IngestJobExecutor.this.getClass(), "IngestJob.cancellationDialog.title"); + JOptionPane.showConfirmDialog(WindowManager.getDefault().getMainWindow(), panel, dialogTitle, JOptionPane.OK_OPTION, JOptionPane.PLAIN_MESSAGE); + if (panel.cancelAllDataSourceIngestModules()) { + new Thread(() -> { IngestJobExecutor.this.cancel(IngestJob.CancellationReason.USER_CANCELLED); - } else { + }).start(); + } else { + new Thread(() -> { IngestJobExecutor.this.cancelCurrentDataSourceIngestModule(); - } - return true; + }).start(); } - }); - dataSourceIngestProgressBar.start(); - dataSourceIngestProgressBar.switchToIndeterminate(); - } + return true; + } + }); + dataSourceIngestProgressBar.start(); + dataSourceIngestProgressBar.switchToIndeterminate(); + }); + } + + private void finishProgressIndicators() { + if (usingNetBeansGUI) { + SwingUtilities.invokeLater(() -> { + if (dataSourceIngestProgressBar != null) { + dataSourceIngestProgressBar.finish(); + dataSourceIngestProgressBar = null; + } + + if (fileIngestProgressBar != null) { + fileIngestProgressBar.finish(); + fileIngestProgressBar = null; + } + + if (artifactIngestProgressBar != null) { + artifactIngestProgressBar.finish(); + artifactIngestProgressBar = null; + } + }); } } @@ -915,20 +947,20 @@ final class IngestJobExecutor { * discarded. 
*/ private void startFileIngestProgressBar() { - if (usingNetBeansGUI) { - synchronized (fileIngestProgressLock) { - String displayName = NbBundle.getMessage(getClass(), "IngestJob.progress.fileIngest.displayName", dataSource.getName()); - fileIngestProgressBar = ProgressHandle.createHandle(displayName, new Cancellable() { - @Override - public boolean cancel() { + SwingUtilities.invokeLater(() -> { + String displayName = NbBundle.getMessage(getClass(), "IngestJob.progress.fileIngest.displayName", dataSource.getName()); + fileIngestProgressBar = ProgressHandle.createHandle(displayName, new Cancellable() { + @Override + public boolean cancel() { + new Thread(() -> { IngestJobExecutor.this.cancel(IngestJob.CancellationReason.USER_CANCELLED); - return true; - } - }); - fileIngestProgressBar.start(); - fileIngestProgressBar.switchToDeterminate((int) this.estimatedFilesToProcess); - } - } + }).start(); + return true; + } + }); + fileIngestProgressBar.start(); + fileIngestProgressBar.switchToDeterminate((int) estimatedFilesToProcess); + }); } /** @@ -969,19 +1001,17 @@ final class IngestJobExecutor { } if (usingNetBeansGUI) { - synchronized (dataSourceIngestProgressLock) { + SwingUtilities.invokeLater(() -> { if (dataSourceIngestProgressBar != null) { dataSourceIngestProgressBar.finish(); dataSourceIngestProgressBar = null; } - } - synchronized (fileIngestProgressLock) { if (fileIngestProgressBar != null) { fileIngestProgressBar.finish(); fileIngestProgressBar = null; } - } + }); } if (!jobCancelled && hasLowPriorityDataSourceIngestModules()) { @@ -993,7 +1023,8 @@ final class IngestJobExecutor { } /** - * Shuts down the ingest module pipelines and progress bars. + * Shuts down the ingest module pipelines and ingest job progress + * indicators. 
*/ private void shutDown() { synchronized (stageTransitionLock) { @@ -1002,29 +1033,7 @@ final class IngestJobExecutor { shutDownIngestModulePipeline(currentDataSourceIngestPipeline); shutDownIngestModulePipeline(artifactIngestPipeline); - - if (usingNetBeansGUI) { - synchronized (dataSourceIngestProgressLock) { - if (dataSourceIngestProgressBar != null) { - dataSourceIngestProgressBar.finish(); - dataSourceIngestProgressBar = null; - } - } - - synchronized (fileIngestProgressLock) { - if (fileIngestProgressBar != null) { - fileIngestProgressBar.finish(); - fileIngestProgressBar = null; - } - } - - synchronized (artifactIngestProgressLock) { - if (artifactIngestProgressBar != null) { - artifactIngestProgressBar.finish(); - artifactIngestProgressBar = null; - } - } - } + finishProgressIndicators(); if (ingestJobInfo != null) { if (jobCancelled) { @@ -1114,16 +1123,17 @@ final class IngestJobExecutor { return; } - synchronized (fileIngestProgressLock) { - ++processedFiles; - if (usingNetBeansGUI) { + final String fileName = file.getName(); + processedFiles++; + if (usingNetBeansGUI) { + SwingUtilities.invokeLater(() -> { if (processedFiles <= estimatedFilesToProcess) { - fileIngestProgressBar.progress(file.getName(), (int) processedFiles); + fileIngestProgressBar.progress(fileName, (int) processedFiles); } else { - fileIngestProgressBar.progress(file.getName(), (int) estimatedFilesToProcess); + fileIngestProgressBar.progress(fileName, (int) estimatedFilesToProcess); } - filesInProgress.add(file.getName()); - } + filesInProgress.add(fileName); + }); } /** @@ -1136,18 +1146,18 @@ final class IngestJobExecutor { } if (usingNetBeansGUI && !jobCancelled) { - synchronized (fileIngestProgressLock) { + SwingUtilities.invokeLater(() -> { /** * Update the file ingest progress bar again, in * case the file was being displayed. 
*/ - filesInProgress.remove(file.getName()); + filesInProgress.remove(fileName); if (filesInProgress.size() > 0) { fileIngestProgressBar.progress(filesInProgress.get(0)); } else { fileIngestProgressBar.progress(""); } - } + }); } } fileIngestPipelinesQueue.put(pipeline); @@ -1254,11 +1264,11 @@ final class IngestJobExecutor { */ void updateDataSourceIngestProgressBarDisplayName(String displayName) { if (usingNetBeansGUI && !jobCancelled) { - synchronized (dataSourceIngestProgressLock) { + SwingUtilities.invokeLater(() -> { if (dataSourceIngestProgressBar != null) { dataSourceIngestProgressBar.setDisplayName(displayName); } - } + }); } } @@ -1272,11 +1282,11 @@ final class IngestJobExecutor { */ void switchDataSourceIngestProgressBarToDeterminate(int workUnits) { if (usingNetBeansGUI && !jobCancelled) { - synchronized (dataSourceIngestProgressLock) { + SwingUtilities.invokeLater(() -> { if (dataSourceIngestProgressBar != null) { dataSourceIngestProgressBar.switchToDeterminate(workUnits); } - } + }); } } @@ -1287,11 +1297,11 @@ final class IngestJobExecutor { */ void switchDataSourceIngestProgressBarToIndeterminate() { if (usingNetBeansGUI && !jobCancelled) { - synchronized (dataSourceIngestProgressLock) { + SwingUtilities.invokeLater(() -> { if (dataSourceIngestProgressBar != null) { dataSourceIngestProgressBar.switchToIndeterminate(); } - } + }); } } @@ -1303,11 +1313,11 @@ final class IngestJobExecutor { */ void advanceDataSourceIngestProgressBar(int workUnits) { if (usingNetBeansGUI && !jobCancelled) { - synchronized (dataSourceIngestProgressLock) { + SwingUtilities.invokeLater(() -> { if (dataSourceIngestProgressBar != null) { dataSourceIngestProgressBar.progress("", workUnits); } - } + }); } } @@ -1319,11 +1329,11 @@ final class IngestJobExecutor { */ void advanceDataSourceIngestProgressBar(String currentTask) { if (usingNetBeansGUI && !jobCancelled) { - synchronized (dataSourceIngestProgressLock) { + SwingUtilities.invokeLater(() -> { if 
(dataSourceIngestProgressBar != null) { dataSourceIngestProgressBar.progress(currentTask); } - } + }); } } @@ -1337,11 +1347,11 @@ final class IngestJobExecutor { */ void advanceDataSourceIngestProgressBar(String currentTask, int workUnits) { if (usingNetBeansGUI && !jobCancelled) { - synchronized (dataSourceIngestProgressLock) { + SwingUtilities.invokeLater(() -> { if (dataSourceIngestProgressBar != null) { dataSourceIngestProgressBar.progress(currentTask, workUnits); } - } + }); } } @@ -1367,18 +1377,18 @@ final class IngestJobExecutor { cancelledDataSourceIngestModules.add(moduleDisplayName); if (usingNetBeansGUI) { - /** - * A new progress bar must be created because the cancel button of - * the previously constructed component is disabled by NetBeans when - * the user selects the "OK" button of the cancellation confirmation - * dialog popped up by NetBeans when the progress bar cancel button - * is pressed. - */ - synchronized (dataSourceIngestProgressLock) { + SwingUtilities.invokeLater(() -> { + /** + * A new progress bar must be created because the cancel button + * of the previously constructed component is disabled by + * NetBeans when the user selects the "OK" button of the + * cancellation confirmation dialog popped up by NetBeans when + * the progress bar cancel button is pressed. + */ dataSourceIngestProgressBar.finish(); dataSourceIngestProgressBar = null; startDataSourceIngestProgressBar(); - } + }); } } @@ -1404,8 +1414,11 @@ final class IngestJobExecutor { } /** - * Requests cancellation of ingest, i.e., a shutdown of the data source - * level and file level ingest pipelines. + * Requests cancellation of the ingest job. All pending ingest tasks for the + * job will be cancelled, but any tasks already in progress in ingest + * threads will run to completion. This could take a while if the ingest + * modules executing the tasks are not checking the ingest job cancellation + * flag via the ingest joib context. 
* * @param reason The cancellation reason. */ @@ -1415,19 +1428,17 @@ final class IngestJobExecutor { IngestJobExecutor.taskScheduler.cancelPendingFileTasksForIngestJob(this); if (usingNetBeansGUI) { - synchronized (dataSourceIngestProgressLock) { + SwingUtilities.invokeLater(() -> { if (dataSourceIngestProgressBar != null) { dataSourceIngestProgressBar.setDisplayName(NbBundle.getMessage(getClass(), "IngestJob.progress.dataSourceIngest.initialDisplayName", dataSource.getName())); dataSourceIngestProgressBar.progress(NbBundle.getMessage(getClass(), "IngestJob.progress.cancelling")); } - } - synchronized (this.fileIngestProgressLock) { - if (null != this.fileIngestProgressBar) { - this.fileIngestProgressBar.setDisplayName(NbBundle.getMessage(getClass(), "IngestJob.progress.fileIngest.displayName", dataSource.getName())); - this.fileIngestProgressBar.progress(NbBundle.getMessage(getClass(), "IngestJob.progress.cancelling")); + if (fileIngestProgressBar != null) { + fileIngestProgressBar.setDisplayName(NbBundle.getMessage(getClass(), "IngestJob.progress.fileIngest.displayName", dataSource.getName())); + fileIngestProgressBar.progress(NbBundle.getMessage(getClass(), "IngestJob.progress.cancelling")); } - } + }); } synchronized (threadRegistrationLock) { @@ -1437,15 +1448,13 @@ final class IngestJobExecutor { pausedIngestThreads.clear(); } - /* - * If a data source had no tasks in progress it may now be complete. - */ checkForStageCompleted(); } /** - * Queries whether or not cancellation, i.e., a shut down of the data source - * level and file level ingest pipelines for this job, has been requested. + * Queries whether or not cancellation of the ingest job has been requested. + * Ingest modules executing ingest tasks for this job should check this flag + * frequently via the ingest job context. * * @return True or false. */ @@ -1454,9 +1463,9 @@ final class IngestJobExecutor { } /** - * Gets the reason this job was cancelled. 
+ * If the ingest job was cancelled, gets the reason this job was cancelled. * - * @return The cancellation reason, may be not cancelled. + * @return The cancellation reason, may be "not cancelled." */ IngestJob.CancellationReason getCancellationReason() { return cancellationReason; @@ -1549,20 +1558,25 @@ final class IngestJobExecutor { long snapShotTime = new Date().getTime(); IngestJobTasksSnapshot tasksSnapshot = null; if (includeIngestTasksSnapshot) { - synchronized (fileIngestProgressLock) { - processedFilesCount = processedFiles; - estimatedFilesToProcessCount = estimatedFilesToProcess; - snapShotTime = new Date().getTime(); - } + processedFilesCount = processedFiles; + estimatedFilesToProcessCount = estimatedFilesToProcess; + snapShotTime = new Date().getTime(); tasksSnapshot = taskScheduler.getTasksSnapshotForJob(getIngestJobId()); } - - return new Snapshot(dataSource.getName(), - getIngestJobId(), createTime, + return new Snapshot( + dataSource.getName(), + getIngestJobId(), + createTime, getCurrentDataSourceIngestModule(), - fileIngestRunning, fileIngestStartTime, - jobCancelled, cancellationReason, cancelledDataSourceIngestModules, - processedFilesCount, estimatedFilesToProcessCount, snapShotTime, tasksSnapshot); + fileIngestRunning, + fileIngestStartTime, + jobCancelled, + cancellationReason, + cancelledDataSourceIngestModules, + processedFilesCount, + estimatedFilesToProcessCount, + snapShotTime, + tasksSnapshot); } /** diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java index 383abbd3af..b31ba6ca33 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java @@ -248,7 +248,8 @@ final class IngestSearchRunner { } /** - * Task to perform periodic searches for each job (does a single index commit first) + * Task to perform 
periodic searches for each job (does a single index + * commit first) */ private final class PeriodicSearchTask implements Runnable { @@ -296,24 +297,23 @@ final class IngestSearchRunner { NbBundle.getMessage(this.getClass(), "SearchRunner.Searcher.done.err.msg"), ex.getMessage())); }// catch and ignore if we were cancelled - catch (java.util.concurrent.CancellationException ex) { + catch (java.util.concurrent.CancellationException ex) { } } } stopWatch.stop(); logger.log(Level.INFO, "All periodic searches cumulatively took {0} secs", stopWatch.getElapsedTimeSecs()); //NON-NLS - + // calculate "hold off" time recalculateUpdateIntervalTime(stopWatch.getElapsedTimeSecs()); // ELDEBUG - + // schedule next PeriodicSearchTask jobProcessingTaskFuture = jobProcessingExecutor.schedule(new PeriodicSearchTask(), currentUpdateIntervalMs, MILLISECONDS); - + // exit this thread return; } - - + private void recalculateUpdateIntervalTime(long lastSerchTimeSec) { // If periodic search takes more than 1/4 of the current periodic search interval, then double the search interval if (lastSerchTimeSec * 1000 < currentUpdateIntervalMs / 4) { @@ -321,7 +321,7 @@ final class IngestSearchRunner { } // double the search interval currentUpdateIntervalMs = currentUpdateIntervalMs * 2; - logger.log(Level.WARNING, "Last periodic search took {0} sec. Increasing search interval to {1} sec", new Object[]{lastSerchTimeSec, currentUpdateIntervalMs/1000}); + logger.log(Level.WARNING, "Last periodic search took {0} sec. 
Increasing search interval to {1} sec", new Object[]{lastSerchTimeSec, currentUpdateIntervalMs / 1000}); return; } } @@ -484,26 +484,35 @@ final class IngestSearchRunner { progressGroup.setDisplayName(displayName + " " + NbBundle.getMessage(this.getClass(), "SearchRunner.doInBackGround.cancelMsg")); } progressGroup.finish(); - return IngestSearchRunner.Searcher.this.cancel(true); + new Thread(() -> { + IngestSearchRunner.Searcher.this.cancel(true); + }).start(); + return true; } - }, null); + }, + null); updateKeywords(); - ProgressContributor[] subProgresses = new ProgressContributor[keywords.size()]; - int i = 0; - for (Keyword keywordQuery : keywords) { - subProgresses[i] = AggregateProgressFactory.createProgressContributor(keywordQuery.getSearchTerm()); - progressGroup.addContributor(subProgresses[i]); - i++; - } - - progressGroup.start(); + SwingUtilities.invokeLater(() -> { + ProgressContributor[] subProgresses = new ProgressContributor[keywords.size()]; + int i = 0; + for (Keyword keywordQuery : keywords) { + subProgresses[i] = AggregateProgressFactory.createProgressContributor(keywordQuery.getSearchTerm()); + progressGroup.addContributor(subProgresses[i]); + i++; + } + progressGroup.start(); + }); final StopWatch stopWatch = new StopWatch(); + stopWatch.start(); + try { - progressGroup.setDisplayName(displayName); + SwingUtilities.invokeLater(() -> { + progressGroup.setDisplayName(displayName); + }); int keywordsSearched = 0; From 2a272207dc0bd64028bb266be8b650ebce066a74 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Mon, 29 Nov 2021 13:10:43 -0500 Subject: [PATCH 100/142] bug fix --- .../org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java index ac1bb6de3f..de174d3477 100644 --- 
a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileSystemDAO.java @@ -538,7 +538,7 @@ public class FileSystemDAO extends AbstractDAO { new FileSystemContentSearchParam(child.getId()), child, getNameForContent(child), - TreeDisplayCount.getDeterminate(countForNode) + countForNode == null ? TreeDisplayCount.NOT_SHOWN : TreeDisplayCount.getDeterminate(countForNode) )); } return new TreeResultsDTO<>(treeItemRows); From cd3f1a58ec24bff4ba43af0ac49d41281ba225c5 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Mon, 29 Nov 2021 13:54:17 -0500 Subject: [PATCH 101/142] 8202 ingest prog bars on EDT; job cancel off EDT --- .../org/sleuthkit/autopsy/ingest/IngestJobExecutor.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java index b3691903c1..8e0f6c7493 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java @@ -202,14 +202,14 @@ final class IngestJobExecutor { * org.sleuthkit.autopsy.progress.ProgressIndicator interface), to better * decouple this object from the application's presentation layer. 
*/ - private volatile long estimatedFilesToProcess; - private volatile long processedFiles; private final boolean usingNetBeansGUI; @ThreadConfined(type = ThreadConfined.ThreadType.AWT) - private final List filesInProgress = new ArrayList<>(); - @ThreadConfined(type = ThreadConfined.ThreadType.AWT) private ProgressHandle dataSourceIngestProgressBar; @ThreadConfined(type = ThreadConfined.ThreadType.AWT) + private final List filesInProgress = new ArrayList<>(); + private volatile long estimatedFilesToProcess; + private volatile long processedFiles; + @ThreadConfined(type = ThreadConfined.ThreadType.AWT) private ProgressHandle fileIngestProgressBar; @ThreadConfined(type = ThreadConfined.ThreadType.AWT) private ProgressHandle artifactIngestProgressBar; From 71b741802c00c3bb3e1d1709707ed7ed989d4f62 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Mon, 29 Nov 2021 14:13:36 -0500 Subject: [PATCH 102/142] starting work --- .../mainui/datamodel/AccountSearchParams.java | 47 +++++ .../mainui/datamodel/DataArtifactDAO.java | 161 +++++++++++++++++- .../mainui/nodes/DataArtifactTypeFactory.java | 99 ++++++++++- 3 files changed, 295 insertions(+), 12 deletions(-) create mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/AccountSearchParams.java diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AccountSearchParams.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AccountSearchParams.java new file mode 100644 index 0000000000..da1c2c5406 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AccountSearchParams.java @@ -0,0 +1,47 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel; + +import org.sleuthkit.datamodel.BlackboardArtifact; + +/** + * Search parameters for accounts. + */ +public class AccountSearchParams extends DataArtifactSearchParam { + + private final String accountType; + + /** + * Main constructor. + * + * @param accountType The account type identifier. + * @param dataSourceId The data source id to filter on or null. + */ + public AccountSearchParams(String accountType, Long dataSourceId) { + super(BlackboardArtifact.Type.TSK_ACCOUNT, dataSourceId); + this.accountType = accountType; + } + + /** + * @return The account type identifier. 
+ */ + public String getAccountType() { + return accountType; + } +} \ No newline at end of file diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index e8436d7de9..224dd470ce 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -21,6 +21,7 @@ package org.sleuthkit.autopsy.mainui.datamodel; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import java.beans.PropertyChangeEvent; +import java.sql.SQLException; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Comparator; @@ -30,11 +31,14 @@ import java.util.Set; import java.util.stream.Collectors; import org.sleuthkit.autopsy.coreutils.Logger; import java.util.concurrent.ExecutionException; +import java.util.logging.Level; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.DataArtifact; import org.sleuthkit.datamodel.SleuthkitCase; @@ -64,22 +68,22 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { return BlackboardArtifactDAO.getIgnoredTreeTypes(); } - private final Cache, DataArtifactTableSearchResultsDTO> dataArtifactCache = CacheBuilder.newBuilder().maximumSize(1000).build(); + private final Cache, DataArtifactTableSearchResultsDTO> dataArtifactCache = CacheBuilder.newBuilder().maximumSize(1000).build(); private DataArtifactTableSearchResultsDTO fetchDataArtifactsForTable(SearchParams cacheKey) throws NoCurrentCaseException, 
TskCoreException { - + SleuthkitCase skCase = getCase(); Blackboard blackboard = skCase.getBlackboard(); BlackboardArtifact.Type artType = cacheKey.getParamData().getArtifactType(); String pagedWhereClause = getWhereClause(cacheKey); - + List arts = new ArrayList<>(); arts.addAll(blackboard.getDataArtifactsWhere(pagedWhereClause)); blackboard.loadBlackboardAttributes(arts); - - long totalResultsCount = getTotalResultsCount(cacheKey, arts.size()); - + + long totalResultsCount = getTotalResultsCount(cacheKey, arts.size()); + TableData tableData = createTableData(artType, arts); return new DataArtifactTableSearchResultsDTO(artType, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), totalResultsCount); } @@ -110,6 +114,74 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { return dataArtifactCache.get(searchParams, () -> fetchDataArtifactsForTable(searchParams)); } + /** + * Fetch data artifacts with the given account type from the database. + * + * @param searchParams The search params for the account type to fetch. + * + * @return The results. 
+ * + * @throws NoCurrentCaseException + * @throws TskCoreException + */ + private DataArtifactTableSearchResultsDTO fetchAccounts(SearchParams searchParams) throws NoCurrentCaseException, TskCoreException { + + // TODO improve performance + SleuthkitCase skCase = getCase(); + Blackboard blackboard = skCase.getBlackboard(); + + Long dataSourceId = searchParams.getParamData().getDataSourceId(); + BlackboardArtifact.Type artType = searchParams.getParamData().getArtifactType(); + + // We currently can't make a query on the set name field because need to use a prepared statement + String originalWhereClause = " artifacts.artifact_type_id = " + artType.getTypeID() + " "; + if (dataSourceId != null) { + originalWhereClause += " AND artifacts.data_source_obj_id = " + dataSourceId + " "; + } + + String expectedAccountType = searchParams.getParamData().getAccountType(); + + List allAccounts = new ArrayList<>(); + allAccounts.addAll(blackboard.getAnalysisResultsWhere(originalWhereClause)); + blackboard.loadBlackboardAttributes(allAccounts); + + // Filter for the selected set + List arts = new ArrayList<>(); + for (BlackboardArtifact art : allAccounts) { + BlackboardAttribute accountTypeAttr = art.getAttribute(BlackboardAttribute.Type.TSK_ACCOUNT_TYPE); + if ((expectedAccountType == null && accountTypeAttr == null) + || (expectedAccountType != null && accountTypeAttr != null && expectedAccountType.equals(accountTypeAttr.getValueString()))) { + arts.add(art); + } + } + + List pagedArtifacts = getPaged(arts, searchParams); + TableData tableData = createTableData(artType, pagedArtifacts); + return new DataArtifactTableSearchResultsDTO(artType, tableData.columnKeys, tableData.rows, searchParams.getStartItem(), arts.size()); + } + + /** + * Gets the cached data or fetched data for the given account search params. + * + * @param searchParams The search params. + * @param startItem The starting item. + * @param maxCount The maximum count of items to return. 
+ * + * @return The resulting data. + * + * @throws ExecutionException + * @throws IllegalArgumentException + */ + public DataArtifactTableSearchResultsDTO getAccountsForTable(AccountSearchParams searchParams, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException { + if (searchParams.getDataSourceId() != null && searchParams.getDataSourceId() < 0) { + throw new IllegalArgumentException(MessageFormat.format("Data source id must be null or > 0.", + searchParams.getDataSourceId() == null ? "" : searchParams.getDataSourceId())); + } + + SearchParams pagedSearchParams = new SearchParams<>(searchParams, startItem, maxCount); + return dataArtifactCache.get(pagedSearchParams, () -> fetchAccounts(pagedSearchParams)); + } + public boolean isDataArtifactInvalidating(DataArtifactSearchParam key, ModuleDataEvent eventData) { return key.getArtifactType().equals(eventData.getBlackboardArtifactType()); } @@ -154,6 +226,60 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { } } + /** + * Returns the accounts and their counts in the current data source if a + * data source id is provided or all accounts if data source id is null. + * + * @param dataSourceId The data source id or null for no data source filter. + * + * @return The results. 
+ * + * @throws ExecutionException + */ + public TreeResultsDTO getAccountsCounts(Long dataSourceId) throws ExecutionException { + String query = "SELECT res.account_type AS account_type, MIN(res.account_display_name) AS account_display_name, COUNT(*) AS count\n" + + "FROM (\n" + + " SELECT MIN(account_types.type_name) AS account_type, MIN(account_types.display_name) AS account_display_name\n" + + " FROM blackboard_artifacts\n" + + " LEFT JOIN blackboard_attributes ON blackboard_artifacts.artifact_id = blackboard_attributes.artifact_id\n" + + " LEFT JOIN account_types ON blackboard_artifacts.value_text = account_types.type_name\n" + + " WHERE blackboard_artifacts.artifact_type_id = " + BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID() + "\n" + + " AND blackboard_attributes.attribute_type_id = " + BlackboardAttribute.Type.TSK_ACCOUNT_TYPE.getTypeID() + "\n" + + (dataSourceId != null && dataSourceId > 0 ? " AND blackboard_artifacts.data_source_obj_id = " + dataSourceId + " " : " ") + "\n" + + " -- group by artifact_id to ensure only one account type per artifact\n" + + " GROUP BY blackboard_artifacts.artifact_id\n" + + ") res\n" + + "GROUP BY res.account_type\n" + + "ORDER BY MIN(res.account_display_name)"; + + List> accountParams = new ArrayList<>(); + try { + getCase().getCaseDbAccessManager().select(query, (resultSet) -> { + try { + while (resultSet.next()) { + String accountType = resultSet.getString("account_type"); + String accountDisplayName = resultSet.getString("account_display_name"); + long count = resultSet.getLong("count"); + accountParams.add(new TreeItemDTO<>( + accountType, + new AccountSearchParams(accountType, dataSourceId), + accountType, + accountDisplayName, + count)); + } + } catch (SQLException ex) { + logger.log(Level.WARNING, "An error occurred while fetching artifact type counts.", ex); + } + }); + + // return results + return new TreeResultsDTO<>(accountParams); + + } catch (NoCurrentCaseException | TskCoreException ex) { + throw new 
ExecutionException("An error occurred while fetching data artifact counts.", ex); + } + } + /* * Handles fetching and paging of data artifacts. */ @@ -183,4 +309,27 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { return MainDAO.getInstance().getDataArtifactsDAO().isDataArtifactInvalidating(this.getParameters(), dataEvent); } } + + public static class AccountFetcher extends DAOFetcher { + + /** + * Main constructor. + * + * @param params Parameters to handle fetching of data. + */ + public AccountFetcher(AccountSearchParams params) { + super(params); + } + + @Override + public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { + return MainDAO.getInstance().getDataArtifactsDAO().getAccountsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize); + } + + @Override + public boolean isRefreshRequired(PropertyChangeEvent evt) { + // TODO + return false; + } + } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java index 50ef7cb987..c4185ed9f2 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java @@ -20,16 +20,19 @@ package org.sleuthkit.autopsy.mainui.nodes; import java.beans.PropertyChangeEvent; import java.util.concurrent.ExecutionException; +import org.openide.nodes.Children; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent; import org.sleuthkit.autopsy.datamodel.utils.IconsUtil; import org.sleuthkit.autopsy.ingest.IngestManager; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +import org.sleuthkit.autopsy.mainui.datamodel.AccountSearchParams; import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactDAO; import 
org.sleuthkit.autopsy.mainui.datamodel.DataArtifactSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.MainDAO; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO; +import static org.sleuthkit.autopsy.mainui.nodes.TreeNode.getDefaultLookup; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact.Category; @@ -56,7 +59,11 @@ public class DataArtifactTypeFactory extends TreeChildFactory createNewNode(TreeResultsDTO.TreeItemDTO rowData) { - return new DataArtifactTypeTreeNode(rowData); + if (rowData.getTypeData().getArtifactType().getTypeID() == BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID()) { + return new AccountTypeParentNode(rowData, this.dataSourceId); + } else { + return new DataArtifactTypeTreeNode(rowData); + } } @Override @@ -89,16 +96,16 @@ public class DataArtifactTypeFactory extends TreeChildFactory { - private static String getIconPath(BlackboardArtifact.Type artType) { - String iconPath = IconsUtil.getIconFilePath(artType.getTypeID()); - return iconPath != null && iconPath.charAt(0) == '/' ? iconPath.substring(1) : iconPath; - } - public DataArtifactTypeTreeNode(TreeResultsDTO.TreeItemDTO itemData) { super(itemData.getTypeData().getArtifactType().getTypeName(), getIconPath(itemData.getTypeData().getArtifactType()), @@ -110,4 +117,84 @@ public class DataArtifactTypeFactory extends TreeChildFactory { + + /** + * Main constructor. + * + * @param itemData The data to display. + * @param dataSourceId The data source id to filter on or null if no + * data source filter. + */ + public AccountTypeParentNode(TreeResultsDTO.TreeItemDTO itemData, Long dataSourceId) { + super(itemData.getTypeData().getArtifactType().getTypeName(), + getIconPath(itemData.getTypeData().getArtifactType()), + itemData, + Children.create(new AccountTypeFactory(dataSourceId), true), + getDefaultLookup(itemData)); + } + } + + /** + * Factory for displaying account types. 
+ */ + static class AccountTypeFactory extends TreeChildFactory { + + private final Long dataSourceId; + + /** + * Main constructor. + * + * @param dataSourceId The data source object id for which the results + * should be filtered or null if no data source + * filtering. + */ + public AccountTypeFactory(Long dataSourceId) { + this.dataSourceId = dataSourceId; + } + + @Override + protected TreeResultsDTO getChildResults() throws IllegalArgumentException, ExecutionException { + return MainDAO.getInstance().getDataArtifactsDAO().getAccountsCounts(this.dataSourceId); + } + + @Override + public boolean isRefreshRequired(PropertyChangeEvent evt) { + // GVDTODO + return false; + } + + @Override + protected TreeNode createNewNode(TreeResultsDTO.TreeItemDTO rowData) { + return new AccountTypeNode(rowData); + } + } + + /** + * A node representing a single account type in the tree. + */ + static class AccountTypeNode extends TreeNode { + + /** + * Main constructor. + * + * @param itemData The data to display. + */ + public AccountTypeNode(TreeResultsDTO.TreeItemDTO itemData) { + super(itemData.getTypeData().getArtifactType().getTypeName(), + getIconPath(itemData.getTypeData().getArtifactType()), + itemData, + Children.LEAF, + getDefaultLookup(itemData)); + } + + @Override + public void respondSelection(DataResultTopComponent dataResultPanel) { + // TODO action for clicking on a single account type. 
+ } + } } From 1fb844442421898d30eb4758f35555ee9d6f9a87 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Mon, 29 Nov 2021 14:34:59 -0500 Subject: [PATCH 103/142] bug fixes --- .../autopsy/mainui/nodes/DataArtifactTypeFactory.java | 4 +++- Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeNode.java | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java index 57e9b04747..a52e862022 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java @@ -21,6 +21,7 @@ package org.sleuthkit.autopsy.mainui.nodes; import java.util.concurrent.ExecutionException; import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent; import org.sleuthkit.autopsy.datamodel.utils.IconsUtil; +import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactDAO; import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.MainDAO; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO; @@ -60,7 +61,8 @@ public class DataArtifactTypeFactory extends TreeChildFactory originalTreeItem = (TreeItemDTO) daoEvt.getItemRecord(); DataArtifactSearchParam searchParam = originalTreeItem.getSearchParams(); - if (this.dataSourceId == null || this.dataSourceId == searchParam.getDataSourceId()) { + if ((this.dataSourceId == null || this.dataSourceId == searchParam.getDataSourceId()) && + !DataArtifactDAO.getIgnoredTreeTypes().contains(searchParam.getArtifactType())) { return TreeChildFactory.createTreeItemDTO(originalTreeItem, new DataArtifactSearchParam(searchParam.getArtifactType(), searchParam.getDataSourceId())); } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeNode.java index a5ad30d4c2..7fd7cf4593 100644 
--- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeNode.java @@ -120,7 +120,7 @@ public abstract class TreeNode extends AbstractNode implements SelectionRespo public void update(TreeItemDTO updatedData) { if (updatedData == null) { logger.log(Level.WARNING, "Expected non-null updatedData"); - } else if (this.itemData != null && this.itemData.getId() != updatedData.getId()) { + } else if (this.itemData != null && !Objects.equals(this.itemData.getId(), updatedData.getId())) { logger.log(Level.WARNING, MessageFormat.format( "Expected update data to have same id but received [id: {0}] replacing [id: {1}]", updatedData.getId(), From 087c70d492d66eb016c6e5876f24fedcb23a12d6 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Mon, 29 Nov 2021 14:39:48 -0500 Subject: [PATCH 104/142] 8202 ingest prog bars on EDT; job cancel off EDT --- .../sleuthkit/autopsy/ingest/IngestJob.java | 2 +- .../autopsy/ingest/IngestJobExecutor.java | 214 +++++++++--------- .../autopsy/ingest/IngestManager.java | 2 +- 3 files changed, 113 insertions(+), 105 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java index 37e4b549ee..a508110b99 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java @@ -173,7 +173,7 @@ public final class IngestJob { void processStreamingIngestDataSource() { if (ingestMode == Mode.STREAMING) { if (ingestModuleExecutor != null) { - ingestModuleExecutor.startStreamingModeDataSourceAnalysis(); + ingestModuleExecutor.addStreamedDataSource(); } else { logger.log(Level.SEVERE, "Attempted to start data source analaysis with no ingest pipeline"); } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java index 8e0f6c7493..160173c9c8 100644 --- 
a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java @@ -541,7 +541,7 @@ final class IngestJobExecutor { } /** - * Determnines which inges job stage to start in and starts up the ingest + * Determnines which ingets job stage to start in and starts up the ingest * module pipelines. * * @return A collection of ingest module startup errors, empty on success. @@ -671,7 +671,7 @@ final class IngestJobExecutor { */ private void startBatchModeAnalysis() { synchronized (stageTransitionLock) { - logInfoMessage(String.format("Starting analysis in batch mode for %s (objID=%d, jobID=%d)", dataSource.getName(), dataSource.getId(), ingestJob.getId())); //NON-NLS + logInfoMessage("Starting ingest job in batch mode"); //NON-NLS stage = IngestJobStage.FILE_AND_HIGH_PRIORITY_DATA_SRC_LEVEL_ANALYSIS; if (hasFileIngestModules()) { @@ -686,22 +686,15 @@ final class IngestJobExecutor { } else { estimatedFilesToProcess = files.size(); } + startFileIngestProgressBar(); } - if (usingNetBeansGUI) { - /* - * Start ingest progress bars in the lower right hand corner of - * the main application window. - */ - if (hasFileIngestModules()) { - startFileIngestProgressBar(); - } - if (hasHighPriorityDataSourceIngestModules()) { - startDataSourceIngestProgressBar(); - } - if (hasDataArtifactIngestModules()) { - startArtifactIngestProgressBar(); - } + if (hasHighPriorityDataSourceIngestModules()) { + startDataSourceIngestProgressBar(); + } + + if (hasDataArtifactIngestModules()) { + startArtifactIngestProgressBar(); } /* @@ -711,60 +704,69 @@ final class IngestJobExecutor { currentDataSourceIngestPipeline = highPriorityDataSourceIngestPipeline; /* - * Schedule ingest tasks and then immediately check for stage - * completion. This is necessary because it is possible that zero - * tasks will actually make it to task execution due to the file - * filter or other ingest job settings. 
In that case, there will - * never be a stage completion check in an ingest thread executing - * an ingest task, so such a job would run forever without a check - * here. + * Schedule ingest tasks. */ if (!files.isEmpty() && hasFileIngestModules()) { taskScheduler.scheduleFileIngestTasks(this, files); } else if (hasHighPriorityDataSourceIngestModules() || hasFileIngestModules() || hasDataArtifactIngestModules()) { taskScheduler.scheduleIngestTasks(this); } + + /* + * Check for stage completion. This is necessary because it is + * possible that none of the tasks that were just scheduled will + * actually make it to task execution due to the file filter or + * other ingest job settings. In that case, there will never be a + * stage completion check in an ingest thread executing an ingest + * task, so such a job would run forever without a check here. + */ checkForStageCompleted(); } } /** * Starts analysis for a streaming mode ingest job. For a streaming mode - * job, the data source processor streams files in as it adds them to the - * case database and file analysis can begin before data source level - * analysis. + * job, a data source processor streams files to this ingest job executor as + * it adds the files to the case database, and file level analysis can begin + * before data source level analysis. */ private void startStreamingModeAnalysis() { synchronized (stageTransitionLock) { - logInfoMessage("Starting data source level analysis in streaming mode"); //NON-NLS + logInfoMessage("Starting ingest job in streaming mode"); //NON-NLS stage = IngestJobStage.STREAMED_FILE_ANALYSIS_ONLY; - if (usingNetBeansGUI) { + if (hasFileIngestModules()) { /* - * Start ingest progress bars in the lower right hand corner of - * the main application window. + * Start the file ingest progress bar, but do not schedule any + * file or data source ingest tasks. 
File ingest tasks will + * instead be scheduled as files are streamed in via + * addStreamedFiles(), and a data source ingest task will be + * scheduled later, via addStreamedDataSource(). + * + * Note that because estimated files remaining to process still + * has its initial value of zero, the fle ingest progress bar + * will start in the "indeterminate" state. A rough estimate of + * the files to processed will be computed later, when all of + * the files have been added to the case database, as signaled + * by a call to the addStreamedDataSource(). */ - if (hasFileIngestModules()) { - /* - * Note that because estimated files remaining to process - * still has its initial value of zero, the progress bar - * will start in the "indeterminate" state. An estimate of - * the files to process can be computed later, when all of - * the files have been added ot the case database. - */ - startFileIngestProgressBar(); - } - if (hasDataArtifactIngestModules()) { - startArtifactIngestProgressBar(); - } + estimatedFilesToProcess = 0; + startFileIngestProgressBar(); } if (hasDataArtifactIngestModules()) { + startArtifactIngestProgressBar(); + /* * Schedule artifact ingest tasks for any artifacts currently in * the case database. This needs to be done before any files or * the data source are streamed in to avoid analyzing the data * artifacts added to the case database by those tasks twice. + * This constraint is implemented by restricting construction of + * a streaming mode IngestJob to + * IngestManager.openIngestStream(), which constructs and starts + * the job before returning the IngestStream that is used to + * stream in the files and data source. */ taskScheduler.scheduleDataArtifactIngestTasks(this); } @@ -776,7 +778,7 @@ final class IngestJobExecutor { * case database and streamed in, and the data source is now ready for * analysis. 
*/ - void startStreamingModeDataSourceAnalysis() { + void addStreamedDataSource() { synchronized (stageTransitionLock) { logInfoMessage("Starting full first stage analysis in streaming mode"); //NON-NLS stage = IngestJobExecutor.IngestJobStage.FILE_AND_HIGH_PRIORITY_DATA_SRC_LEVEL_ANALYSIS; @@ -859,20 +861,22 @@ final class IngestJobExecutor { */ private void startArtifactIngestProgressBar() { if (usingNetBeansGUI) { - SwingUtilities.invokeLater(() -> { - String displayName = NbBundle.getMessage(this.getClass(), "IngestJob.progress.dataArtifactIngest.displayName", this.dataSource.getName()); - artifactIngestProgressBar = ProgressHandle.createHandle(displayName, new Cancellable() { - @Override - public boolean cancel() { - new Thread(() -> { - IngestJobExecutor.this.cancel(IngestJob.CancellationReason.USER_CANCELLED); - }).start(); - return true; - } + if (usingNetBeansGUI) { + SwingUtilities.invokeLater(() -> { + String displayName = NbBundle.getMessage(this.getClass(), "IngestJob.progress.dataArtifactIngest.displayName", this.dataSource.getName()); + artifactIngestProgressBar = ProgressHandle.createHandle(displayName, new Cancellable() { + @Override + public boolean cancel() { + new Thread(() -> { + IngestJobExecutor.this.cancel(IngestJob.CancellationReason.USER_CANCELLED); + }).start(); + return true; + } + }); + artifactIngestProgressBar.start(); + artifactIngestProgressBar.switchToIndeterminate(); }); - artifactIngestProgressBar.start(); - artifactIngestProgressBar.switchToIndeterminate(); - }); + } } } @@ -885,37 +889,39 @@ final class IngestJobExecutor { * cancellation occurs is NOT discarded. 
*/ private void startDataSourceIngestProgressBar() { - SwingUtilities.invokeLater(() -> { - String displayName = NbBundle.getMessage(this.getClass(), "IngestJob.progress.dataSourceIngest.initialDisplayName", dataSource.getName()); - dataSourceIngestProgressBar = ProgressHandle.createHandle(displayName, new Cancellable() { - @Override - public boolean cancel() { - /* - * The user has already pressed the cancel button on this - * progress bar, and the OK button of a cancelation - * confirmation dialog supplied by NetBeans. Find out - * whether the user wants to cancel only the currently - * executing data source ingest module or the entire ingest - * job. - */ - DataSourceIngestCancellationPanel panel = new DataSourceIngestCancellationPanel(); - String dialogTitle = NbBundle.getMessage(IngestJobExecutor.this.getClass(), "IngestJob.cancellationDialog.title"); - JOptionPane.showConfirmDialog(WindowManager.getDefault().getMainWindow(), panel, dialogTitle, JOptionPane.OK_OPTION, JOptionPane.PLAIN_MESSAGE); - if (panel.cancelAllDataSourceIngestModules()) { - new Thread(() -> { - IngestJobExecutor.this.cancel(IngestJob.CancellationReason.USER_CANCELLED); - }).start(); - } else { - new Thread(() -> { - IngestJobExecutor.this.cancelCurrentDataSourceIngestModule(); - }).start(); + if (usingNetBeansGUI) { + SwingUtilities.invokeLater(() -> { + String displayName = NbBundle.getMessage(this.getClass(), "IngestJob.progress.dataSourceIngest.initialDisplayName", dataSource.getName()); + dataSourceIngestProgressBar = ProgressHandle.createHandle(displayName, new Cancellable() { + @Override + public boolean cancel() { + /* + * The user has already pressed the cancel button on + * this progress bar, and the OK button of a cancelation + * confirmation dialog supplied by NetBeans. Find out + * whether the user wants to cancel only the currently + * executing data source ingest module or the entire + * ingest job. 
+ */ + DataSourceIngestCancellationPanel panel = new DataSourceIngestCancellationPanel(); + String dialogTitle = NbBundle.getMessage(IngestJobExecutor.this.getClass(), "IngestJob.cancellationDialog.title"); + JOptionPane.showConfirmDialog(WindowManager.getDefault().getMainWindow(), panel, dialogTitle, JOptionPane.OK_OPTION, JOptionPane.PLAIN_MESSAGE); + if (panel.cancelAllDataSourceIngestModules()) { + new Thread(() -> { + IngestJobExecutor.this.cancel(IngestJob.CancellationReason.USER_CANCELLED); + }).start(); + } else { + new Thread(() -> { + IngestJobExecutor.this.cancelCurrentDataSourceIngestModule(); + }).start(); + } + return true; } - return true; - } + }); + dataSourceIngestProgressBar.start(); + dataSourceIngestProgressBar.switchToIndeterminate(); }); - dataSourceIngestProgressBar.start(); - dataSourceIngestProgressBar.switchToIndeterminate(); - }); + } } private void finishProgressIndicators() { @@ -947,20 +953,22 @@ final class IngestJobExecutor { * discarded. */ private void startFileIngestProgressBar() { - SwingUtilities.invokeLater(() -> { - String displayName = NbBundle.getMessage(getClass(), "IngestJob.progress.fileIngest.displayName", dataSource.getName()); - fileIngestProgressBar = ProgressHandle.createHandle(displayName, new Cancellable() { - @Override - public boolean cancel() { - new Thread(() -> { - IngestJobExecutor.this.cancel(IngestJob.CancellationReason.USER_CANCELLED); - }).start(); - return true; - } + if (usingNetBeansGUI) { + SwingUtilities.invokeLater(() -> { + String displayName = NbBundle.getMessage(getClass(), "IngestJob.progress.fileIngest.displayName", dataSource.getName()); + fileIngestProgressBar = ProgressHandle.createHandle(displayName, new Cancellable() { + @Override + public boolean cancel() { + new Thread(() -> { + IngestJobExecutor.this.cancel(IngestJob.CancellationReason.USER_CANCELLED); + }).start(); + return true; + } + }); + fileIngestProgressBar.start(); + fileIngestProgressBar.switchToDeterminate((int) 
estimatedFilesToProcess); }); - fileIngestProgressBar.start(); - fileIngestProgressBar.switchToDeterminate((int) estimatedFilesToProcess); - }); + } } /** @@ -1478,7 +1486,7 @@ final class IngestJobExecutor { * @param message The message. */ private void logInfoMessage(String message) { - logger.log(Level.INFO, String.format("%s (data source = %s, object Id = %d, job id = %d)", message, dataSource.getName(), dataSource.getId(), getIngestJobId())); //NON-NLS + logger.log(Level.INFO, String.format("%s (data source = %s, data source object Id = %d, job id = %d)", message, dataSource.getName(), dataSource.getId(), getIngestJobId())); //NON-NLS } /** @@ -1490,7 +1498,7 @@ final class IngestJobExecutor { * @param throwable The throwable associated with the error. */ private void logErrorMessage(Level level, String message, Throwable throwable) { - logger.log(level, String.format("%s (data source = %s, object Id = %d, ingest job id = %d)", message, dataSource.getName(), dataSource.getId(), getIngestJobId()), throwable); //NON-NLS + logger.log(level, String.format("%s (data source = %s, data source object Id = %d, ingest job id = %d)", message, dataSource.getName(), dataSource.getId(), getIngestJobId()), throwable); //NON-NLS } /** @@ -1501,7 +1509,7 @@ final class IngestJobExecutor { * @param message The message. 
*/ private void logErrorMessage(Level level, String message) { - logger.log(level, String.format("%s (data source = %s, object Id = %d, ingest job id %d)", message, dataSource.getName(), dataSource.getId(), getIngestJobId())); //NON-NLS + logger.log(level, String.format("%s (data source = %s, data source object Id = %d, ingest job id %d)", message, dataSource.getName(), dataSource.getId(), getIngestJobId())); //NON-NLS } /** @@ -1523,7 +1531,7 @@ final class IngestJobExecutor { */ private void logIngestModuleErrors(List errors, AbstractFile file) { for (IngestModuleError error : errors) { - logErrorMessage(Level.SEVERE, String.format("%s experienced an error during analysis while processing file %s, object ID %d", error.getModuleDisplayName(), file.getName(), file.getId()), error.getThrowable()); //NON-NLS + logErrorMessage(Level.SEVERE, String.format("%s experienced an error during analysis while processing file %s (object ID = %d)", error.getModuleDisplayName(), file.getName(), file.getId()), error.getThrowable()); //NON-NLS } } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java index 2c87487232..0dc2597481 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java @@ -1050,7 +1050,7 @@ public class IngestManager implements IngestProgressSnapshotProvider { } /** - * Creates and starts an ingest job for a collection of data sources. + * Creates and starts an ingest job. 
*/ private final class StartIngestJobTask implements Callable { From 280580042ee7cf54c200d0b228540cb0a6828105 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Mon, 29 Nov 2021 15:13:14 -0500 Subject: [PATCH 105/142] 8202 ingest prog bars on EDT; job cancel off EDT --- .../autopsy/ingest/IngestJobExecutor.java | 190 ++++++++++-------- 1 file changed, 103 insertions(+), 87 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java index 160173c9c8..43fed4ce58 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java @@ -795,38 +795,31 @@ final class IngestJobExecutor { * processed. */ estimatedFilesToProcess = dataSource.accept(new GetFilesCountVisitor()); - if (usingNetBeansGUI) { - SwingUtilities.invokeLater(() -> { - if (fileIngestProgressBar != null) { - fileIngestProgressBar.switchToDeterminate((int) estimatedFilesToProcess); - } - }); - } + switchFileIngestProgressBarToDeterminate(); } - if (usingNetBeansGUI) { + currentDataSourceIngestPipeline = highPriorityDataSourceIngestPipeline; + if (hasHighPriorityDataSourceIngestModules()) { /* * Start a data source level ingest progress bar in the lower * right hand corner of the main application window. The file * and data artifact ingest progress bars were already started * in startStreamingModeAnalysis(). */ - if (hasHighPriorityDataSourceIngestModules()) { - startDataSourceIngestProgressBar(); - } - } + startDataSourceIngestProgressBar(); - currentDataSourceIngestPipeline = highPriorityDataSourceIngestPipeline; - if (hasHighPriorityDataSourceIngestModules()) { + /* + * Schedule a task for the data source. 
+ */ IngestJobExecutor.taskScheduler.scheduleDataSourceIngestTask(this); } else { /* - * If no data source level ingest task is scheduled at this time - * and all of the file level and artifact ingest tasks scheduled - * during the initial file streaming stage have already - * executed, there will never be a stage completion check in an - * ingest thread executing an ingest task, so such a job would - * run forever without a check here. + * If no data source level ingest task is scheduled at this + * time, and all of the file level and artifact ingest tasks + * scheduled during the initial file streaming stage have + * already been executed, there will never be a stage completion + * check in an ingest thread executing an ingest task, so such a + * job would run forever without a check here. */ checkForStageCompleted(); } @@ -839,13 +832,9 @@ final class IngestJobExecutor { private void startLowPriorityDataSourceAnalysis() { synchronized (stageTransitionLock) { if (hasLowPriorityDataSourceIngestModules()) { - logInfoMessage(String.format("Starting low priority data source analysis for %s (objID=%d, jobID=%d)", dataSource.getName(), dataSource.getId(), ingestJob.getId())); //NON-NLS + logInfoMessage("Starting low priority data source analysis"); //NON-NLS stage = IngestJobExecutor.IngestJobStage.LOW_PRIORITY_DATA_SRC_LEVEL_ANALYSIS; - - if (usingNetBeansGUI) { - startDataSourceIngestProgressBar(); - } - + startDataSourceIngestProgressBar(); currentDataSourceIngestPipeline = lowPriorityDataSourceIngestPipeline; taskScheduler.scheduleDataSourceIngestTask(this); } @@ -853,38 +842,36 @@ final class IngestJobExecutor { } /** - * Starts a data artifacts analysis NetBeans progress bar in the lower right - * hand corner of the main application window. The progress bar provides the - * user with a task cancellation button. Pressing it cancels the ingest job. - * Analysis already completed at the time that cancellation occurs is NOT - * discarded. 
+ * Starts a NetBeans progress bar for data artifacts analysis in the lower + * right hand corner of the main application window. The progress bar + * provides the user with a task cancellation button. Pressing it cancels + * the ingest job. Analysis already completed at the time that cancellation + * occurs is NOT discarded. */ private void startArtifactIngestProgressBar() { if (usingNetBeansGUI) { - if (usingNetBeansGUI) { - SwingUtilities.invokeLater(() -> { - String displayName = NbBundle.getMessage(this.getClass(), "IngestJob.progress.dataArtifactIngest.displayName", this.dataSource.getName()); - artifactIngestProgressBar = ProgressHandle.createHandle(displayName, new Cancellable() { - @Override - public boolean cancel() { - new Thread(() -> { - IngestJobExecutor.this.cancel(IngestJob.CancellationReason.USER_CANCELLED); - }).start(); - return true; - } - }); - artifactIngestProgressBar.start(); - artifactIngestProgressBar.switchToIndeterminate(); + SwingUtilities.invokeLater(() -> { + String displayName = NbBundle.getMessage(this.getClass(), "IngestJob.progress.dataArtifactIngest.displayName", this.dataSource.getName()); + artifactIngestProgressBar = ProgressHandle.createHandle(displayName, new Cancellable() { + @Override + public boolean cancel() { + new Thread(() -> { + IngestJobExecutor.this.cancel(IngestJob.CancellationReason.USER_CANCELLED); + }).start(); + return true; + } }); - } + artifactIngestProgressBar.start(); + artifactIngestProgressBar.switchToIndeterminate(); + }); } } /** - * Starts a data source level analysis NetBeans progress bar in the lower - * right hand corner of the main application window. The progress bar + * Starts a NetBeans progress bar for data source level analysis in the + * lower right hand corner of the main application window. The progress bar * provides the user with a task cancellation button. 
Pressing it cancels - * either the currently running data source level ingest module or the + * either the currently running data source level ingest module, or the * entire ingest job. Analysis already completed at the time that * cancellation occurs is NOT discarded. */ @@ -924,29 +911,8 @@ final class IngestJobExecutor { } } - private void finishProgressIndicators() { - if (usingNetBeansGUI) { - SwingUtilities.invokeLater(() -> { - if (dataSourceIngestProgressBar != null) { - dataSourceIngestProgressBar.finish(); - dataSourceIngestProgressBar = null; - } - - if (fileIngestProgressBar != null) { - fileIngestProgressBar.finish(); - fileIngestProgressBar = null; - } - - if (artifactIngestProgressBar != null) { - artifactIngestProgressBar.finish(); - artifactIngestProgressBar = null; - } - }); - } - } - /** - * Starts a file analysis NetBeans progress bar in the lower right hand + * Starts a NetBeans progress bar for file analysis in the lower right hand * corner of the main application window. The progress bar provides the user * with a task cancellation button. Pressing it cancels the ingest job. * Analysis already completed at the time that cancellation occurs is NOT @@ -971,6 +937,49 @@ final class IngestJobExecutor { } } + /** + * Finishes the first stage progress bars. + */ + private void finishFirstStageProgressBars() { + if (usingNetBeansGUI) { + SwingUtilities.invokeLater(() -> { + if (dataSourceIngestProgressBar != null) { + dataSourceIngestProgressBar.finish(); + dataSourceIngestProgressBar = null; + } + + if (fileIngestProgressBar != null) { + fileIngestProgressBar.finish(); + fileIngestProgressBar = null; + } + }); + } + } + + /** + * Finishes all current progress bars. 
+ */ + private void finishProgressBars() { + if (usingNetBeansGUI) { + SwingUtilities.invokeLater(() -> { + if (dataSourceIngestProgressBar != null) { + dataSourceIngestProgressBar.finish(); + dataSourceIngestProgressBar = null; + } + + if (fileIngestProgressBar != null) { + fileIngestProgressBar.finish(); + fileIngestProgressBar = null; + } + + if (artifactIngestProgressBar != null) { + artifactIngestProgressBar.finish(); + artifactIngestProgressBar = null; + } + }); + } + } + /** * Checks to see if the ingest tasks for the current stage of this job are * completed and does a stage transition if they are. @@ -1008,19 +1017,7 @@ final class IngestJobExecutor { shutDownIngestModulePipeline(pipeline); } - if (usingNetBeansGUI) { - SwingUtilities.invokeLater(() -> { - if (dataSourceIngestProgressBar != null) { - dataSourceIngestProgressBar.finish(); - dataSourceIngestProgressBar = null; - } - - if (fileIngestProgressBar != null) { - fileIngestProgressBar.finish(); - fileIngestProgressBar = null; - } - }); - } + finishFirstStageProgressBars(); if (!jobCancelled && hasLowPriorityDataSourceIngestModules()) { startLowPriorityDataSourceAnalysis(); @@ -1041,7 +1038,7 @@ final class IngestJobExecutor { shutDownIngestModulePipeline(currentDataSourceIngestPipeline); shutDownIngestModulePipeline(artifactIngestPipeline); - finishProgressIndicators(); + finishProgressBars(); if (ingestJobInfo != null) { if (jobCancelled) { @@ -1363,6 +1360,20 @@ final class IngestJobExecutor { } } + /** + * Switches the file ingest progress bar to determinate mode, using the + * estimated number of files to process as the number of work units. 
+ */ + private void switchFileIngestProgressBarToDeterminate() { + if (usingNetBeansGUI) { + SwingUtilities.invokeLater(() -> { + if (fileIngestProgressBar != null) { + fileIngestProgressBar.switchToDeterminate((int) estimatedFilesToProcess); + } + }); + } + } + /** * Queries whether or not a temporary cancellation of data source level * ingest in order to stop the currently executing data source level ingest @@ -1383,7 +1394,6 @@ final class IngestJobExecutor { void currentDataSourceIngestModuleCancellationCompleted(String moduleDisplayName) { currentDataSourceIngestModuleCancelled = false; cancelledDataSourceIngestModules.add(moduleDisplayName); - if (usingNetBeansGUI) { SwingUtilities.invokeLater(() -> { /** @@ -1433,7 +1443,6 @@ final class IngestJobExecutor { void cancel(IngestJob.CancellationReason reason) { jobCancelled = true; cancellationReason = reason; - IngestJobExecutor.taskScheduler.cancelPendingFileTasksForIngestJob(this); if (usingNetBeansGUI) { SwingUtilities.invokeLater(() -> { @@ -1446,9 +1455,16 @@ final class IngestJobExecutor { fileIngestProgressBar.setDisplayName(NbBundle.getMessage(getClass(), "IngestJob.progress.fileIngest.displayName", dataSource.getName())); fileIngestProgressBar.progress(NbBundle.getMessage(getClass(), "IngestJob.progress.cancelling")); } + + if (artifactIngestProgressBar != null) { + artifactIngestProgressBar.setDisplayName(NbBundle.getMessage(getClass(), "IngestJob.progress.dataArtifactIngest.displayName", dataSource.getName())); + artifactIngestProgressBar.progress(NbBundle.getMessage(getClass(), "IngestJob.progress.cancelling")); + } }); } + IngestJobExecutor.taskScheduler.cancelPendingFileTasksForIngestJob(this); + synchronized (threadRegistrationLock) { for (Thread thread : pausedIngestThreads) { thread.interrupt(); From c4b703ae0c54d72b784bbe70e99461f02baa2963 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Mon, 29 Nov 2021 15:14:59 -0500 Subject: [PATCH 106/142] 8202 ingest prog bars on EDT; job cancel off 
EDT --- .../keywordsearch/IngestSearchRunner.java | 49 ++++++++----------- 1 file changed, 20 insertions(+), 29 deletions(-) mode change 100644 => 100755 KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java old mode 100644 new mode 100755 index b31ba6ca33..383abbd3af --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java @@ -248,8 +248,7 @@ final class IngestSearchRunner { } /** - * Task to perform periodic searches for each job (does a single index - * commit first) + * Task to perform periodic searches for each job (does a single index commit first) */ private final class PeriodicSearchTask implements Runnable { @@ -297,23 +296,24 @@ final class IngestSearchRunner { NbBundle.getMessage(this.getClass(), "SearchRunner.Searcher.done.err.msg"), ex.getMessage())); }// catch and ignore if we were cancelled - catch (java.util.concurrent.CancellationException ex) { + catch (java.util.concurrent.CancellationException ex) { } } } stopWatch.stop(); logger.log(Level.INFO, "All periodic searches cumulatively took {0} secs", stopWatch.getElapsedTimeSecs()); //NON-NLS - + // calculate "hold off" time recalculateUpdateIntervalTime(stopWatch.getElapsedTimeSecs()); // ELDEBUG - + // schedule next PeriodicSearchTask jobProcessingTaskFuture = jobProcessingExecutor.schedule(new PeriodicSearchTask(), currentUpdateIntervalMs, MILLISECONDS); - + // exit this thread return; } - + + private void recalculateUpdateIntervalTime(long lastSerchTimeSec) { // If periodic search takes more than 1/4 of the current periodic search interval, then double the search interval if (lastSerchTimeSec * 1000 < currentUpdateIntervalMs / 4) { @@ -321,7 +321,7 @@ final class IngestSearchRunner { } // double 
the search interval currentUpdateIntervalMs = currentUpdateIntervalMs * 2; - logger.log(Level.WARNING, "Last periodic search took {0} sec. Increasing search interval to {1} sec", new Object[]{lastSerchTimeSec, currentUpdateIntervalMs / 1000}); + logger.log(Level.WARNING, "Last periodic search took {0} sec. Increasing search interval to {1} sec", new Object[]{lastSerchTimeSec, currentUpdateIntervalMs/1000}); return; } } @@ -484,35 +484,26 @@ final class IngestSearchRunner { progressGroup.setDisplayName(displayName + " " + NbBundle.getMessage(this.getClass(), "SearchRunner.doInBackGround.cancelMsg")); } progressGroup.finish(); - new Thread(() -> { - IngestSearchRunner.Searcher.this.cancel(true); - }).start(); - return true; + return IngestSearchRunner.Searcher.this.cancel(true); } - }, - null); + }, null); updateKeywords(); - SwingUtilities.invokeLater(() -> { - ProgressContributor[] subProgresses = new ProgressContributor[keywords.size()]; - int i = 0; - for (Keyword keywordQuery : keywords) { - subProgresses[i] = AggregateProgressFactory.createProgressContributor(keywordQuery.getSearchTerm()); - progressGroup.addContributor(subProgresses[i]); - i++; - } - progressGroup.start(); - }); + ProgressContributor[] subProgresses = new ProgressContributor[keywords.size()]; + int i = 0; + for (Keyword keywordQuery : keywords) { + subProgresses[i] = AggregateProgressFactory.createProgressContributor(keywordQuery.getSearchTerm()); + progressGroup.addContributor(subProgresses[i]); + i++; + } + + progressGroup.start(); final StopWatch stopWatch = new StopWatch(); - stopWatch.start(); - try { - SwingUtilities.invokeLater(() -> { - progressGroup.setDisplayName(displayName); - }); + progressGroup.setDisplayName(displayName); int keywordsSearched = 0; From 30434d2a11cf53f08f3f6af9d9c426b746794838 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Mon, 29 Nov 2021 15:33:22 -0500 Subject: [PATCH 107/142] fixes --- .../corecomponents/DataResultPanel.java | 30 +++++++++++-- 
.../DataResultTopComponent.java | 11 +++++ .../autopsy/datamodel/utils/IconsUtil.java | 5 ++- .../mainui/datamodel/AccountSearchParams.java | 29 ++++++++++++ .../mainui/datamodel/DataArtifactDAO.java | 13 +++--- .../mainui/nodes/DataArtifactTypeFactory.java | 45 ++++++++++++++++--- 6 files changed, 119 insertions(+), 14 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java index 84035a1624..64e740be6c 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java @@ -62,11 +62,13 @@ import org.sleuthkit.autopsy.datamodel.BaseChildFactory.PageCountChangeEvent; import org.sleuthkit.autopsy.datamodel.BaseChildFactory.PageSizeChangeEvent; import org.sleuthkit.autopsy.datamodel.NodeSelectionInfo; import org.sleuthkit.autopsy.ingest.IngestManager; +import org.sleuthkit.autopsy.mainui.datamodel.AccountSearchParams; import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultDAO.AnalysisResultFetcher; import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultDAO.AnalysisResultSetFetcher; import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultDAO.KeywordHitResultFetcher; import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSetSearchParam; +import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactDAO.DataArtifactAccountFetcher; import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactDAO.DataArtifactFetcher; import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.FileSystemContentSearchParam; @@ -512,7 +514,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C // if search result root node, it's fine; otherwise, wrap in result // viewer filter node to make sure there are no grandchildren - 
this.currentRootNode = (rootNode instanceof SearchResultRootNode) + this.currentRootNode = (rootNode instanceof SearchResultRootNode) ? rootNode : new ResultViewerFilterParentNode(rootNode); @@ -1187,6 +1189,27 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C } } + /** + * Displays results for querying the DAO for accounts matching the search + * parameters query. + * + * @param accountParams The search parameter query. + */ + void displayAccounts(AccountSearchParams accountParams) { + try { + this.searchResultManager = new SearchManager(new DataArtifactAccountFetcher(accountParams), getPageSize()); + SearchResultsDTO results = searchResultManager.getResults(); + displaySearchResults(results, true); + } catch (ExecutionException ex) { + logger.log(Level.WARNING, + MessageFormat.format("There was an error displaying search results for [artifact type: {0}, data source id: {1}, account type: {2}]", + accountParams.getArtifactType(), + accountParams.getDataSourceId() == null ? "" : accountParams.getDataSourceId(), + accountParams.getAccountType()), + ex); + } + } + void displayAnalysisResult(AnalysisResultSearchParam analysisResultParams) { try { this.searchResultManager = new SearchManager(new AnalysisResultFetcher(analysisResultParams), getPageSize()); @@ -1321,7 +1344,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C ex); } } - + /** * Displays results of querying the DAO for the given search parameters * query. @@ -1361,7 +1384,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C ex); } } - + /** * Displays results of querying the DAO for the given search parameters * query. @@ -1513,6 +1536,7 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C /** * Main constructor. + * * @param original The original node to wrap. 
*/ ResultViewerFilterParentNode(Node original) { diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultTopComponent.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultTopComponent.java index 2be5daea6d..1f6d79aa37 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultTopComponent.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultTopComponent.java @@ -42,6 +42,7 @@ import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.FileTypeExtensionsSearchParams; import org.sleuthkit.autopsy.mainui.datamodel.MainDAO; import org.sleuthkit.autopsy.directorytree.ExternalViewerShortcutAction; +import org.sleuthkit.autopsy.mainui.datamodel.AccountSearchParams; import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.FileSystemContentSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.FileSystemHostSearchParam; @@ -481,6 +482,16 @@ public final class DataResultTopComponent extends TopComponent implements DataRe public void displayOsAccounts(OsAccountsSearchParams osAccountParams) { dataResultPanel.displayOsAccount(osAccountParams); } + + /** + * Displays results for querying the DAO for accounts matching the search + * parameters query. + * + * @param accountParams The search parameter query. 
+ */ + public void displayAccounts(AccountSearchParams accountParams) { + dataResultPanel.displayAccounts(accountParams); + } @Override public void setTitle(String title) { diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/utils/IconsUtil.java b/Core/src/org/sleuthkit/autopsy/datamodel/utils/IconsUtil.java index ce3a6763d9..3d4247701c 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/utils/IconsUtil.java +++ b/Core/src/org/sleuthkit/autopsy/datamodel/utils/IconsUtil.java @@ -140,8 +140,11 @@ public final class IconsUtil { } else if (typeID == BlackboardArtifact.Type.TSK_KEYWORD_HIT.getTypeID()) { imageFile = "keyword_hits.png"; } else if (typeID == BlackboardArtifact.Type.TSK_INTERESTING_ARTIFACT_HIT.getTypeID() - || typeID == BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT.getTypeID()) { + || typeID == BlackboardArtifact.Type.TSK_INTERESTING_FILE_HIT.getTypeID() + || typeID == BlackboardArtifact.Type.TSK_INTERESTING_ITEM.getTypeID()) { imageFile = "interesting_item.png"; + } else if (typeID == BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID()) { + imageFile = "accounts.png"; } else { imageFile = "artifact-icon.png"; //NON-NLS } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AccountSearchParams.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AccountSearchParams.java index da1c2c5406..9caa2513d3 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AccountSearchParams.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AccountSearchParams.java @@ -18,6 +18,7 @@ */ package org.sleuthkit.autopsy.mainui.datamodel; +import java.util.Objects; import org.sleuthkit.datamodel.BlackboardArtifact; /** @@ -44,4 +45,32 @@ public class AccountSearchParams extends DataArtifactSearchParam { public String getAccountType() { return accountType; } + + @Override + public int hashCode() { + int hash = 5; + hash = 37 * hash + Objects.hashCode(this.accountType); + hash = 37 * hash + super.hashCode(); + return hash; + } + + @Override + public 
boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final AccountSearchParams other = (AccountSearchParams) obj; + if (!Objects.equals(this.accountType, other.accountType)) { + return false; + } + return super.equals(obj); + } + + } \ No newline at end of file diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index 224dd470ce..3cb7d754fd 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -142,7 +142,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { String expectedAccountType = searchParams.getParamData().getAccountType(); List allAccounts = new ArrayList<>(); - allAccounts.addAll(blackboard.getAnalysisResultsWhere(originalWhereClause)); + allAccounts.addAll(blackboard.getDataArtifactsWhere(originalWhereClause)); blackboard.loadBlackboardAttributes(allAccounts); // Filter for the selected set @@ -237,12 +237,12 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { * @throws ExecutionException */ public TreeResultsDTO getAccountsCounts(Long dataSourceId) throws ExecutionException { - String query = "SELECT res.account_type AS account_type, MIN(res.account_display_name) AS account_display_name, COUNT(*) AS count\n" + String query = "res.account_type AS account_type, MIN(res.account_display_name) AS account_display_name, COUNT(*) AS count\n" + "FROM (\n" + " SELECT MIN(account_types.type_name) AS account_type, MIN(account_types.display_name) AS account_display_name\n" + " FROM blackboard_artifacts\n" + " LEFT JOIN blackboard_attributes ON blackboard_artifacts.artifact_id = blackboard_attributes.artifact_id\n" - + " LEFT JOIN account_types ON blackboard_artifacts.value_text = account_types.type_name\n" + + " LEFT JOIN 
account_types ON blackboard_attributes.value_text = account_types.type_name\n" + " WHERE blackboard_artifacts.artifact_type_id = " + BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID() + "\n" + " AND blackboard_attributes.attribute_type_id = " + BlackboardAttribute.Type.TSK_ACCOUNT_TYPE.getTypeID() + "\n" + (dataSourceId != null && dataSourceId > 0 ? " AND blackboard_artifacts.data_source_obj_id = " + dataSourceId + " " : " ") + "\n" @@ -310,14 +310,17 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { } } - public static class AccountFetcher extends DAOFetcher { + /** + * Handles fetching and paging of account data artifacts. + */ + public static class DataArtifactAccountFetcher extends DAOFetcher { /** * Main constructor. * * @param params Parameters to handle fetching of data. */ - public AccountFetcher(AccountSearchParams params) { + public DataArtifactAccountFetcher(AccountSearchParams params) { super(params); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java index c4185ed9f2..ae6ffe3ae6 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java @@ -33,6 +33,7 @@ import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.MainDAO; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO; import static org.sleuthkit.autopsy.mainui.nodes.TreeNode.getDefaultLookup; +import org.sleuthkit.datamodel.Account; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact.Category; @@ -179,6 +180,42 @@ public class DataArtifactTypeFactory extends TreeChildFactory { + private static final String ICON_BASE_PATH = "org/sleuthkit/autopsy/images/"; //NON-NLS + + /** + * Get the path of the icon for the given Account Type. 
+ * + * @return The path of the icon for the given Account Type. + */ + public static String getAccountIconPath(String accountType) { + + if (accountType.equals(Account.Type.CREDIT_CARD.getTypeName())) { + return ICON_BASE_PATH + "credit-card.png"; + } else if (accountType.equals(Account.Type.DEVICE.getTypeName())) { + return ICON_BASE_PATH + "image.png"; + } else if (accountType.equals(Account.Type.EMAIL.getTypeName())) { + return ICON_BASE_PATH + "email.png"; + } else if (accountType.equals(Account.Type.FACEBOOK.getTypeName())) { + return ICON_BASE_PATH + "facebook.png"; + } else if (accountType.equals(Account.Type.INSTAGRAM.getTypeName())) { + return ICON_BASE_PATH + "instagram.png"; + } else if (accountType.equals(Account.Type.MESSAGING_APP.getTypeName())) { + return ICON_BASE_PATH + "messaging.png"; + } else if (accountType.equals(Account.Type.PHONE.getTypeName())) { + return ICON_BASE_PATH + "phone.png"; + } else if (accountType.equals(Account.Type.TWITTER.getTypeName())) { + return ICON_BASE_PATH + "twitter.png"; + } else if (accountType.equals(Account.Type.WEBSITE.getTypeName())) { + return ICON_BASE_PATH + "web-file.png"; + } else if (accountType.equals(Account.Type.WHATSAPP.getTypeName())) { + return ICON_BASE_PATH + "WhatsApp.png"; + } else if (accountType.equals(Account.Type.CREDIT_CARD.getTypeName())) { + return ICON_BASE_PATH + "credit-cards.png"; + } else { + return ICON_BASE_PATH + "face.png"; + } + } + /** * Main constructor. * @@ -186,15 +223,13 @@ public class DataArtifactTypeFactory extends TreeChildFactory itemData) { super(itemData.getTypeData().getArtifactType().getTypeName(), - getIconPath(itemData.getTypeData().getArtifactType()), - itemData, - Children.LEAF, - getDefaultLookup(itemData)); + getAccountIconPath(itemData.getTypeData().getAccountType()), + itemData); } @Override public void respondSelection(DataResultTopComponent dataResultPanel) { - // TODO action for clicking on a single account type. 
+ dataResultPanel.displayAccounts(super.getItemData().getTypeData()); } } } From 8de44185db3d69207d69f49d48d0d8302822d9e9 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Mon, 29 Nov 2021 16:35:19 -0500 Subject: [PATCH 108/142] fix --- .../autopsy/mainui/nodes/DataArtifactTypeFactory.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java index a52e862022..893e41013d 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java @@ -18,6 +18,7 @@ */ package org.sleuthkit.autopsy.mainui.nodes; +import java.util.Objects; import java.util.concurrent.ExecutionException; import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent; import org.sleuthkit.autopsy.datamodel.utils.IconsUtil; @@ -61,7 +62,7 @@ public class DataArtifactTypeFactory extends TreeChildFactory originalTreeItem = (TreeItemDTO) daoEvt.getItemRecord(); DataArtifactSearchParam searchParam = originalTreeItem.getSearchParams(); - if ((this.dataSourceId == null || this.dataSourceId == searchParam.getDataSourceId()) && + if ((this.dataSourceId == null || Objects.equals(this.dataSourceId, searchParam.getDataSourceId())) && !DataArtifactDAO.getIgnoredTreeTypes().contains(searchParam.getArtifactType())) { return TreeChildFactory.createTreeItemDTO(originalTreeItem, new DataArtifactSearchParam(searchParam.getArtifactType(), searchParam.getDataSourceId())); } From ce634ce909c3ffee0eac4c592bd8f4bb090e1cb4 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Mon, 29 Nov 2021 16:50:36 -0500 Subject: [PATCH 109/142] 8202 ingest prog bars on EDT; job cancel off EDT --- .../autopsy/ingest/IngestJobExecutor.java | 159 +++++++++++------- 1 file changed, 95 insertions(+), 64 deletions(-) diff --git 
a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java index 43fed4ce58..197b35ef2c 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java @@ -959,7 +959,7 @@ final class IngestJobExecutor { /** * Finishes all current progress bars. */ - private void finishProgressBars() { + private void finishAllProgressBars() { if (usingNetBeansGUI) { SwingUtilities.invokeLater(() -> { if (dataSourceIngestProgressBar != null) { @@ -1038,7 +1038,7 @@ final class IngestJobExecutor { shutDownIngestModulePipeline(currentDataSourceIngestPipeline); shutDownIngestModulePipeline(artifactIngestPipeline); - finishProgressBars(); + finishAllProgressBars(); if (ingestJobInfo != null) { if (jobCancelled) { @@ -1114,7 +1114,7 @@ final class IngestJobExecutor { if (!pipeline.isEmpty()) { /* * Get the file from the task. If the file was "streamed," - * the task may only have the file object ID and a trip to + * the task may only have the file object ID, and a trip to * the case database will be required. */ AbstractFile file; @@ -1128,47 +1128,24 @@ final class IngestJobExecutor { return; } + /** + * Run the file through the modules in the file ingest + * pipeline. + */ final String fileName = file.getName(); processedFiles++; - if (usingNetBeansGUI) { - SwingUtilities.invokeLater(() -> { - if (processedFiles <= estimatedFilesToProcess) { - fileIngestProgressBar.progress(fileName, (int) processedFiles); - } else { - fileIngestProgressBar.progress(fileName, (int) estimatedFilesToProcess); - } - filesInProgress.add(fileName); - }); - } - - /** - * Run the file through the modules in the pipeline. 
- */ + updateFileIngestProgressForFileTaskStarted(fileName); List errors = new ArrayList<>(); errors.addAll(pipeline.performTask(task)); if (!errors.isEmpty()) { logIngestModuleErrors(errors, file); } - - if (usingNetBeansGUI && !jobCancelled) { - SwingUtilities.invokeLater(() -> { - /** - * Update the file ingest progress bar again, in - * case the file was being displayed. - */ - filesInProgress.remove(fileName); - if (filesInProgress.size() > 0) { - fileIngestProgressBar.progress(filesInProgress.get(0)); - } else { - fileIngestProgressBar.progress(""); - } - }); - } + updateFileProgressBarForFileTaskCompleted(fileName); } fileIngestPipelinesQueue.put(pipeline); } } catch (InterruptedException ex) { - logger.log(Level.SEVERE, String.format("Unexpected interrupt of file ingest thread during execution of file ingest job (file obj ID = %d)", task.getFileId()), ex); + logger.log(Level.SEVERE, String.format("Unexpected interrupt of file ingest thread during execution of file ingest job (file object ID = %d, thread ID = %d)", task.getFileId(), task.getThreadId()), ex); Thread.currentThread().interrupt(); } finally { taskScheduler.notifyTaskCompleted(task); @@ -1263,7 +1240,7 @@ final class IngestJobExecutor { /** * Updates the display name shown on the current data source level ingest - * progress bar for this job. + * progress bar for this job, if the job has not been cancelled. * * @param displayName The new display name. */ @@ -1279,8 +1256,8 @@ final class IngestJobExecutor { /** * Switches the current data source level ingest progress bar to determinate - * mode. This should be called if the total work units to process the data - * source is known. + * mode, if the job has not been cancelled. This should be called if the + * total work units to process the data source is known. * * @param workUnits Total number of work units for the processing of the * data source. 
@@ -1297,8 +1274,8 @@ final class IngestJobExecutor { /** * Switches the current data source level ingest progress bar to - * indeterminate mode. This should be called if the total work units to - * process the data source is unknown. + * indeterminate mode, if the job has not been cancelled. This should be + * called if the total work units to process the data source is unknown. */ void switchDataSourceIngestProgressBarToIndeterminate() { if (usingNetBeansGUI && !jobCancelled) { @@ -1312,7 +1289,8 @@ final class IngestJobExecutor { /** * Updates the current data source level ingest progress bar with the number - * of work units performed, if in the determinate mode. + * of work units performed, if in the determinate mode, and the job has not + * been cancelled. * * @param workUnits Number of work units performed. */ @@ -1328,7 +1306,8 @@ final class IngestJobExecutor { /** * Updates the current data source level ingest progress bar with a new task - * name, where the task name is the "subtitle" under the display name. + * name, where the task name is the "subtitle" under the display name, if + * the job has not been cancelled. * * @param currentTask The task name. */ @@ -1344,8 +1323,9 @@ final class IngestJobExecutor { /** * Updates the current data source level ingest progress bar with a new task - * name and the number of work units performed, if in the determinate mode. - * The task name is the "subtitle" under the display name. + * name and the number of work units performed, if in the determinate mode, + * and the job has not been cancelled. The task name is the "subtitle" under + * the display name. * * @param currentTask The task name. * @param workUnits Number of work units performed. @@ -1374,6 +1354,74 @@ final class IngestJobExecutor { } } + /** + * Updates the current file ingest progress bar upon start of analysis of a + * file, if the job has not been cancelled, if the job has not been + * cancelled. + * + * @param fileName The name of the file. 
+ */ + private void updateFileIngestProgressForFileTaskStarted(String fileName) { + if (usingNetBeansGUI && !jobCancelled) { + SwingUtilities.invokeLater(() -> { + if (processedFiles <= estimatedFilesToProcess) { + fileIngestProgressBar.progress(fileName, (int) processedFiles); + } else { + fileIngestProgressBar.progress(fileName, (int) estimatedFilesToProcess); + } + filesInProgress.add(fileName); + }); + } + } + + /** + * Updates the current file ingest progress bar upon completion of analysis + * of a file, if the job has not been cancelled. + * + * @param fileName The name of the file. + */ + private void updateFileProgressBarForFileTaskCompleted(String fileName) { + if (usingNetBeansGUI && !jobCancelled) { + SwingUtilities.invokeLater(() -> { + filesInProgress.remove(fileName); + /* + * Display the name of another file in progress, or the empty + * string if there are none. + */ + if (filesInProgress.size() > 0) { + fileIngestProgressBar.progress(filesInProgress.get(0)); + } else { + fileIngestProgressBar.progress(""); // NON-NLS + } + }); + } + } + + /** + * Displays a "cancelling" message on all of the current ingest message + * progress bars. 
+ */ + private void displayCancellingProgressMessage() { + if (usingNetBeansGUI) { + SwingUtilities.invokeLater(() -> { + if (dataSourceIngestProgressBar != null) { + dataSourceIngestProgressBar.setDisplayName(NbBundle.getMessage(getClass(), "IngestJob.progress.dataSourceIngest.initialDisplayName", dataSource.getName())); + dataSourceIngestProgressBar.progress(NbBundle.getMessage(getClass(), "IngestJob.progress.cancelling")); + } + + if (fileIngestProgressBar != null) { + fileIngestProgressBar.setDisplayName(NbBundle.getMessage(getClass(), "IngestJob.progress.fileIngest.displayName", dataSource.getName())); + fileIngestProgressBar.progress(NbBundle.getMessage(getClass(), "IngestJob.progress.cancelling")); + } + + if (artifactIngestProgressBar != null) { + artifactIngestProgressBar.setDisplayName(NbBundle.getMessage(getClass(), "IngestJob.progress.dataArtifactIngest.displayName", dataSource.getName())); + artifactIngestProgressBar.progress(NbBundle.getMessage(getClass(), "IngestJob.progress.cancelling")); + } + }); + } + } + /** * Queries whether or not a temporary cancellation of data source level * ingest in order to stop the currently executing data source level ingest @@ -1387,14 +1435,16 @@ final class IngestJobExecutor { /** * Rescinds a temporary cancellation of data source level ingest that was - * used to stop a single data source level ingest module for this job. + * used to stop a single data source level ingest module for this job. The + * data source ingest progress bar is reset, if the job has not been + * cancelled. * * @param moduleDisplayName The display name of the module that was stopped. 
*/ void currentDataSourceIngestModuleCancellationCompleted(String moduleDisplayName) { currentDataSourceIngestModuleCancelled = false; cancelledDataSourceIngestModules.add(moduleDisplayName); - if (usingNetBeansGUI) { + if (usingNetBeansGUI && !jobCancelled) { SwingUtilities.invokeLater(() -> { /** * A new progress bar must be created because the cancel button @@ -1443,26 +1493,7 @@ final class IngestJobExecutor { void cancel(IngestJob.CancellationReason reason) { jobCancelled = true; cancellationReason = reason; - - if (usingNetBeansGUI) { - SwingUtilities.invokeLater(() -> { - if (dataSourceIngestProgressBar != null) { - dataSourceIngestProgressBar.setDisplayName(NbBundle.getMessage(getClass(), "IngestJob.progress.dataSourceIngest.initialDisplayName", dataSource.getName())); - dataSourceIngestProgressBar.progress(NbBundle.getMessage(getClass(), "IngestJob.progress.cancelling")); - } - - if (fileIngestProgressBar != null) { - fileIngestProgressBar.setDisplayName(NbBundle.getMessage(getClass(), "IngestJob.progress.fileIngest.displayName", dataSource.getName())); - fileIngestProgressBar.progress(NbBundle.getMessage(getClass(), "IngestJob.progress.cancelling")); - } - - if (artifactIngestProgressBar != null) { - artifactIngestProgressBar.setDisplayName(NbBundle.getMessage(getClass(), "IngestJob.progress.dataArtifactIngest.displayName", dataSource.getName())); - artifactIngestProgressBar.progress(NbBundle.getMessage(getClass(), "IngestJob.progress.cancelling")); - } - }); - } - + displayCancellingProgressMessage(); IngestJobExecutor.taskScheduler.cancelPendingFileTasksForIngestJob(this); synchronized (threadRegistrationLock) { From c80d81675f324c90f14dc52a45a33eb142df87d2 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Mon, 29 Nov 2021 16:58:46 -0500 Subject: [PATCH 110/142] 8202 ingest prog bars on EDT; job cancel off EDT --- Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java | 2 +- Core/src/org/sleuthkit/autopsy/ingest/IngestJobInputStream.java | 2 +- 2 
files changed, 2 insertions(+), 2 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java index a508110b99..350096d626 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java @@ -170,7 +170,7 @@ public final class IngestJob { * Starts data source level analysis for this job if it is running in * streaming ingest mode. */ - void processStreamingIngestDataSource() { + void addStreamedDataSource() { if (ingestMode == Mode.STREAMING) { if (ingestModuleExecutor != null) { ingestModuleExecutor.addStreamedDataSource(); diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobInputStream.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobInputStream.java index 2d00727858..fe43bb12b3 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobInputStream.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobInputStream.java @@ -67,7 +67,7 @@ class IngestJobInputStream implements IngestStream { @Override public synchronized void close() { closed = true; - ingestJob.processStreamingIngestDataSource(); + ingestJob.addStreamedDataSource(); } @Override From 17bcd8e0b3c3988f82dc908957f5598d820d3c47 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Mon, 29 Nov 2021 19:19:27 -0500 Subject: [PATCH 111/142] fixes --- .../org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java index dccad57be5..1070fd9907 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java @@ -31,6 +31,7 @@ import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Objects; import java.util.Set; import 
java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutionException; @@ -130,7 +131,7 @@ public class ViewsDAO extends AbstractDAO { FileTypeExtensionsEvent extEvt = (FileTypeExtensionsEvent) eventData; String extension = extEvt.getExtension().toLowerCase(); return key.getFilter().getFilter().contains(extension) - && (key.getDataSourceId() == null || key.getDataSourceId() == extEvt.getDataSourceId()); + && (key.getDataSourceId() == null || key.getDataSourceId().equals(extEvt.getDataSourceId())); } private boolean isFilesByMimeInvalidating(FileTypeMimeSearchParams key, DAOEvent eventData) { @@ -140,7 +141,7 @@ public class ViewsDAO extends AbstractDAO { FileTypeMimeEvent mimeEvt = (FileTypeMimeEvent) eventData; return mimeEvt.getMimeType().startsWith(key.getMimeType()) - && (key.getDataSourceId() == null || key.getDataSourceId() == mimeEvt.getDataSourceId()); + && (key.getDataSourceId() == null || Objects.equals(key.getDataSourceId(), mimeEvt.getDataSourceId())); } private boolean isFilesBySizeInvalidating(FileTypeSizeSearchParams key, DAOEvent eventData) { @@ -150,7 +151,7 @@ public class ViewsDAO extends AbstractDAO { FileTypeSizeEvent sizeEvt = (FileTypeSizeEvent) eventData; return sizeEvt.getSizeFilter().equals(key.getSizeFilter()) - && (key.getDataSourceId() == null || key.getDataSourceId() == sizeEvt.getDataSourceId()); + && (key.getDataSourceId() == null || Objects.equals(key.getDataSourceId(), sizeEvt.getDataSourceId())); } /** @@ -765,7 +766,7 @@ public class ViewsDAO extends AbstractDAO { // if search params is top level mime prefix (without suffix) and data source is null or ==. 
if (mimePieces.getValue() == null && (mimeParams.getDataSourceId() == null - || suffixes.values().stream().flatMap(set -> set.stream()).anyMatch(ds -> ds == mimeParams.getDataSourceId()))) { + || suffixes.values().stream().flatMap(set -> set.stream()).anyMatch(ds -> Objects.equals(mimeParams.getDataSourceId(), ds)))) { concurrentMap.remove(k); // otherwise, see if suffix is present From 31974462d0bb8d8fca9949a639c324af4fac725e Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Mon, 29 Nov 2021 20:13:54 -0500 Subject: [PATCH 112/142] integrate rearchitecture code --- .../mainui/datamodel/DataArtifactDAO.java | 15 ++-- .../mainui/nodes/Bundle.properties-MERGED | 1 + .../mainui/nodes/DataArtifactTypeFactory.java | 69 +++++++++++++------ 3 files changed, 59 insertions(+), 26 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index 8a6f67fb62..d538a28067 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -286,7 +286,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { new AccountSearchParams(accountType, dataSourceId), accountType, accountDisplayName, - count)); + TreeDisplayCount.getDeterminate(count))); } } catch (SQLException ex) { logger.log(Level.WARNING, "An error occurred while fetching artifact type counts.", ex); @@ -335,7 +335,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { } // invalidate cache entries that are affected by events - ConcurrentMap, DataArtifactTableSearchResultsDTO> concurrentMap = this.dataArtifactCache.asMap(); + ConcurrentMap, DataArtifactTableSearchResultsDTO> concurrentMap = this.dataArtifactCache.asMap(); concurrentMap.forEach((k, v) -> { Set dsIds = artifactTypeDataSourceMap.get(k.getParamData().getArtifactType()); if (dsIds != null) { @@ -432,13 +432,18 @@ public class 
DataArtifactDAO extends BlackboardArtifactDAO { super(params); } + protected DataArtifactDAO getDAO() { + return MainDAO.getInstance().getDataArtifactsDAO(); + } + + @Override - public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException { - return MainDAO.getInstance().getDataArtifactsDAO().getAccountsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize); + public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { + return getDAO().getAccountsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize); } @Override - public boolean isRefreshRequired(PropertyChangeEvent evt) { + public boolean isRefreshRequired(DAOEvent evt) { // TODO return false; } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/mainui/nodes/Bundle.properties-MERGED index 3568b60122..11ca64b00e 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/Bundle.properties-MERGED @@ -1,4 +1,5 @@ AnalysisResultTypeFactory_adHocName=Adhoc Results +DataArtifactTypeFactory_AccountTypeParentNode_displayName=Communcation Accounts FileSystemFactory.FileSystemTreeNode.ExtractUnallocAction.text=Extract Unallocated Space to Single Files FileSystemFactory.UnsupportedTreeNode.displayName=Unsupported Content ImageNode_ExtractUnallocAction_text=Extract Unallocated Space to Single Files diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java index fff1748a91..e586a4dfb6 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java @@ -21,12 +21,7 @@ package org.sleuthkit.autopsy.mainui.nodes; import java.util.Objects; import 
java.util.concurrent.ExecutionException; import org.openide.nodes.Children; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; -import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent; -import org.sleuthkit.autopsy.datamodel.utils.IconsUtil; -import org.sleuthkit.autopsy.ingest.IngestManager; -import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.mainui.datamodel.AccountSearchParams; import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent; import org.sleuthkit.autopsy.datamodel.utils.IconsUtil; @@ -63,7 +58,7 @@ public class DataArtifactTypeFactory extends TreeChildFactory createNewNode(TreeResultsDTO.TreeItemDTO rowData) { - if (rowData.getTypeData().getArtifactType().getTypeID() == BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID()) { + if (rowData.getSearchParams().getArtifactType().getTypeID() == BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID()) { return new AccountTypeParentNode(rowData, this.dataSourceId); } else { return new DataArtifactTypeTreeNode(rowData); @@ -76,8 +71,8 @@ public class DataArtifactTypeFactory extends TreeChildFactory originalTreeItem = (TreeItemDTO) daoEvt.getItemRecord(); DataArtifactSearchParam searchParam = originalTreeItem.getSearchParams(); - if ((this.dataSourceId == null || Objects.equals(this.dataSourceId, searchParam.getDataSourceId())) && - !DataArtifactDAO.getIgnoredTreeTypes().contains(searchParam.getArtifactType())) { + if ((this.dataSourceId == null || Objects.equals(this.dataSourceId, searchParam.getDataSourceId())) + && !DataArtifactDAO.getIgnoredTreeTypes().contains(searchParam.getArtifactType())) { return TreeChildFactory.createTreeItemDTO(originalTreeItem, new DataArtifactSearchParam(searchParam.getArtifactType(), searchParam.getDataSourceId())); } } @@ -114,8 +109,28 @@ public class DataArtifactTypeFactory extends TreeChildFactory { + /** + * Sets correct title (not 
using artifact type display name). + * + * @param itemData The item data. + * + * @return The updated data. + */ + private static TreeItemDTO createTitledData(TreeResultsDTO.TreeItemDTO itemData) { + return new TreeItemDTO<>( + itemData.getTypeId(), + itemData.getSearchParams(), + itemData.getId(), + Bundle.DataArtifactTypeFactory_AccountTypeParentNode_displayName(), + itemData.getDisplayCount() + ); + } + /** * Main constructor. * @@ -124,12 +139,19 @@ public class DataArtifactTypeFactory extends TreeChildFactory itemData, Long dataSourceId) { - super(itemData.getTypeData().getArtifactType().getTypeName(), - getIconPath(itemData.getTypeData().getArtifactType()), - itemData, + super(itemData.getSearchParams().getArtifactType().getTypeName(), + getIconPath(itemData.getSearchParams().getArtifactType()), + createTitledData(itemData), Children.create(new AccountTypeFactory(dataSourceId), true), - getDefaultLookup(itemData)); + getDefaultLookup(itemData) + ); } + + @Override + protected void updateDisplayName(TreeItemDTO prevData, TreeItemDTO curData) { + super.updateDisplayName(prevData, createTitledData(curData)); + } + } /** @@ -156,14 +178,19 @@ public class DataArtifactTypeFactory extends TreeChildFactory createNewNode(TreeResultsDTO.TreeItemDTO rowData) { + return new AccountTypeNode(rowData); } @Override - protected TreeNode createNewNode(TreeResultsDTO.TreeItemDTO rowData) { - return new AccountTypeNode(rowData); + protected TreeItemDTO getOrCreateRelevantChild(TreeEvent treeEvt) { + // TODO + return null; + } + + @Override + public int compare(AccountSearchParams o1, AccountSearchParams o2) { + return 0; } } @@ -214,14 +241,14 @@ public class DataArtifactTypeFactory extends TreeChildFactory itemData) { - super(itemData.getTypeData().getArtifactType().getTypeName(), - getAccountIconPath(itemData.getTypeData().getAccountType()), + super(itemData.getSearchParams().getArtifactType().getTypeName(), + getAccountIconPath(itemData.getSearchParams().getAccountType()), 
itemData); } @Override public void respondSelection(DataResultTopComponent dataResultPanel) { - dataResultPanel.displayAccounts(super.getItemData().getTypeData()); + dataResultPanel.displayAccounts(super.getItemData().getSearchParams()); } } } From 6129b70d7f675b0235d1affacfaa1b3c881f270e Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Mon, 29 Nov 2021 21:30:15 -0500 Subject: [PATCH 113/142] integrate account udpates --- .../mainui/datamodel/AccountSearchParams.java | 14 +- .../mainui/datamodel/DataArtifactDAO.java | 168 ++++++++++++------ .../mainui/datamodel/events/AccountEvent.java | 76 ++++++++ .../mainui/nodes/DataArtifactTypeFactory.java | 37 ++-- 4 files changed, 224 insertions(+), 71 deletions(-) create mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AccountEvent.java diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AccountSearchParams.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AccountSearchParams.java index 9caa2513d3..b9959f80c5 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AccountSearchParams.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AccountSearchParams.java @@ -19,6 +19,7 @@ package org.sleuthkit.autopsy.mainui.datamodel; import java.util.Objects; +import org.sleuthkit.datamodel.Account; import org.sleuthkit.datamodel.BlackboardArtifact; /** @@ -26,7 +27,7 @@ import org.sleuthkit.datamodel.BlackboardArtifact; */ public class AccountSearchParams extends DataArtifactSearchParam { - private final String accountType; + private final Account.Type accountType; /** * Main constructor. @@ -34,7 +35,7 @@ public class AccountSearchParams extends DataArtifactSearchParam { * @param accountType The account type identifier. * @param dataSourceId The data source id to filter on or null. 
*/ - public AccountSearchParams(String accountType, Long dataSourceId) { + public AccountSearchParams(Account.Type accountType, Long dataSourceId) { super(BlackboardArtifact.Type.TSK_ACCOUNT, dataSourceId); this.accountType = accountType; } @@ -42,15 +43,15 @@ public class AccountSearchParams extends DataArtifactSearchParam { /** * @return The account type identifier. */ - public String getAccountType() { + public Account.Type getAccountType() { return accountType; } @Override public int hashCode() { - int hash = 5; - hash = 37 * hash + Objects.hashCode(this.accountType); - hash = 37 * hash + super.hashCode(); + int hash = 7; + hash = 29 * hash + Objects.hashCode(this.accountType); + hash = 29 * hash + super.hashCode(); return hash; } @@ -71,6 +72,7 @@ public class AccountSearchParams extends DataArtifactSearchParam { } return super.equals(obj); } + } \ No newline at end of file diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index d538a28067..aab4e2964d 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -27,30 +27,30 @@ import java.beans.PropertyChangeEvent; import java.sql.SQLException; import java.text.MessageFormat; import java.util.ArrayList; -import java.util.Collection; import java.util.Collections; import java.util.Comparator; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Objects; import java.util.Set; import java.util.concurrent.ConcurrentMap; import java.util.stream.Collectors; import org.sleuthkit.autopsy.coreutils.Logger; import java.util.concurrent.ExecutionException; import java.util.logging.Level; -import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; -import org.sleuthkit.autopsy.ingest.ModuleDataEvent; -import 
org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; import java.util.stream.Stream; -import org.apache.commons.lang3.tuple.Pair; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeDisplayCount; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; +import org.sleuthkit.autopsy.mainui.datamodel.events.AccountEvent; import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; import org.sleuthkit.autopsy.mainui.datamodel.events.TreeCounts; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; +import org.sleuthkit.datamodel.Account; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardAttribute; @@ -84,7 +84,9 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { } private final Cache, DataArtifactTableSearchResultsDTO> dataArtifactCache = CacheBuilder.newBuilder().maximumSize(1000).build(); + private final Cache, DataArtifactTableSearchResultsDTO> accountCache = CacheBuilder.newBuilder().maximumSize(1000).build(); private final TreeCounts treeCounts = new TreeCounts<>(); + private final TreeCounts accountCounts = new TreeCounts<>(); private DataArtifactTableSearchResultsDTO fetchDataArtifactsForTable(SearchParams cacheKey) throws NoCurrentCaseException, TskCoreException { @@ -151,7 +153,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { originalWhereClause += " AND artifacts.data_source_obj_id = " + dataSourceId + " "; } - String expectedAccountType = searchParams.getParamData().getAccountType(); + String expectedAccountTypeName = searchParams.getParamData().getAccountType().getTypeName(); List allAccounts = new ArrayList<>(); allAccounts.addAll(blackboard.getDataArtifactsWhere(originalWhereClause)); @@ -161,8 +163,8 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { List arts = new 
ArrayList<>(); for (BlackboardArtifact art : allAccounts) { BlackboardAttribute accountTypeAttr = art.getAttribute(BlackboardAttribute.Type.TSK_ACCOUNT_TYPE); - if ((expectedAccountType == null && accountTypeAttr == null) - || (expectedAccountType != null && accountTypeAttr != null && expectedAccountType.equals(accountTypeAttr.getValueString()))) { + if ((expectedAccountTypeName == null && accountTypeAttr == null) + || (expectedAccountTypeName != null && accountTypeAttr != null && expectedAccountTypeName.equals(accountTypeAttr.getValueString()))) { arts.add(art); } } @@ -191,7 +193,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { } SearchParams pagedSearchParams = new SearchParams<>(searchParams, startItem, maxCount); - return dataArtifactCache.get(pagedSearchParams, () -> fetchAccounts(pagedSearchParams)); + return accountCache.get(pagedSearchParams, () -> fetchAccounts(pagedSearchParams)); } private boolean isDataArtifactInvalidating(DataArtifactSearchParam key, DAOEvent eventData) { @@ -204,8 +206,14 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { } } - public void dropDataArtifactCache() { - dataArtifactCache.invalidateAll(); + private boolean isAccountInvalidating(AccountSearchParams parameters, DAOEvent evt) { + if (!(evt instanceof AccountEvent)) { + return false; + } else { + AccountEvent accountEvt = (AccountEvent) evt; + return Objects.equals(parameters.getAccountType(), evt.getType()) + && (parameters.getDataSourceId() == null || (parameters.getDataSourceId() == accountEvt.getDataSourceId())); + } } /** @@ -231,7 +239,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { Map typeCounts = getCounts(BlackboardArtifact.Category.DATA_ARTIFACT, dataSourceId); List> treeItemRows = typeCounts.entrySet().stream() .map(entry -> { - return createTreeItem(entry.getKey(), dataSourceId, + return createDataArtifactTreeItem(entry.getKey(), dataSourceId, indeterminateTypes.contains(entry.getKey()) ? 
TreeDisplayCount.INDETERMINATE : TreeDisplayCount.getDeterminate(entry.getValue())); @@ -278,15 +286,11 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { getCase().getCaseDbAccessManager().select(query, (resultSet) -> { try { while (resultSet.next()) { - String accountType = resultSet.getString("account_type"); + String accountTypeName = resultSet.getString("account_type"); String accountDisplayName = resultSet.getString("account_display_name"); + Account.Type accountType = new Account.Type(accountTypeName, accountDisplayName); long count = resultSet.getLong("count"); - accountParams.add(new TreeItemDTO<>( - accountType, - new AccountSearchParams(accountType, dataSourceId), - accountType, - accountDisplayName, - TreeDisplayCount.getDeterminate(count))); + accountParams.add(createAccountTreeItem(accountType, dataSourceId, TreeDisplayCount.getDeterminate(count))); } } catch (SQLException ex) { logger.log(Level.WARNING, "An error occurred while fetching artifact type counts.", ex); @@ -304,6 +308,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { @Override void clearCaches() { this.dataArtifactCache.invalidateAll(); + this.accountCache.invalidateAll(); this.handleIngestComplete(); } @@ -315,37 +320,46 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { return Collections.emptySet(); } - Map> artifactTypeDataSourceMap = dataEvt.getArtifacts().stream() - .map((art) -> { - try { - if (BlackboardArtifact.Category.DATA_ARTIFACT.equals(art.getType().getCategory())) { - return Pair.of(art.getType(), art.getDataSourceObjectID()); + Map> artifactTypeDataSourceMap = new HashMap<>(); + Map> accountTypeMap = new HashMap<>(); + + for (BlackboardArtifact art : dataEvt.getArtifacts()) { + try { + if (BlackboardArtifact.Category.DATA_ARTIFACT.equals(art.getType().getCategory())) { + if (art.getType().getTypeID() == BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID()) { + BlackboardAttribute accountTypeAttribute = 
art.getAttribute(BlackboardAttribute.Type.TSK_ACCOUNT_TYPE); + if (accountTypeAttribute == null) { + continue; } - } catch (TskCoreException ex) { - logger.log(Level.WARNING, "Unable to fetch artifact category for artifact with id: " + art.getId(), ex); + + String accountTypeName = accountTypeAttribute.getValueString(); + if (accountTypeName == null) { + continue; + } + + accountTypeMap.computeIfAbsent(getCase().getCommunicationsManager().getAccountType(accountTypeName), (k) -> new HashSet<>()) + .add(art.getDataSourceObjectID()); + } else { + artifactTypeDataSourceMap.computeIfAbsent(art.getType(), (k) -> new HashSet<>()) + .add(art.getDataSourceObjectID()); } - return null; - }) - .filter(pr -> pr != null) - .collect(Collectors.groupingBy(pr -> pr.getKey(), Collectors.mapping(pr -> pr.getValue(), Collectors.toSet()))); + } + } catch (NoCurrentCaseException | TskCoreException ex) { + logger.log(Level.WARNING, "Unable to fetch artifact category for artifact with id: " + art.getId(), ex); + } + } // don't do anything else if no relevant events - if (artifactTypeDataSourceMap.isEmpty()) { + if (artifactTypeDataSourceMap.isEmpty() && accountTypeMap.isEmpty()) { return Collections.emptySet(); } - // invalidate cache entries that are affected by events - ConcurrentMap, DataArtifactTableSearchResultsDTO> concurrentMap = this.dataArtifactCache.asMap(); - concurrentMap.forEach((k, v) -> { - Set dsIds = artifactTypeDataSourceMap.get(k.getParamData().getArtifactType()); - if (dsIds != null) { - Long searchDsId = k.getParamData().getDataSourceId(); - if (searchDsId == null || dsIds.contains(searchDsId)) { - concurrentMap.remove(k); - } - } - }); + invalidateInCaches(accountTypeMap, artifactTypeDataSourceMap); + return getDAOEvts(accountTypeMap, artifactTypeDataSourceMap); + } + + private Set getDAOEvts(Map> accountTypeDataSourceMap, Map> artifactTypeDataSourceMap) { // gather dao events based on artifacts List dataArtifactEvents = new ArrayList<>(); for (Entry> entry : 
artifactTypeDataSourceMap.entrySet()) { @@ -356,16 +370,61 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { } } - List newTreeEvents = this.treeCounts.enqueueAll(dataArtifactEvents).stream() - .map(daoEvt -> new TreeEvent(createTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.INDETERMINATE), false)) + List dataArtifactTreeEvents = this.treeCounts.enqueueAll(dataArtifactEvents).stream() + .map(daoEvt -> new TreeEvent(createDataArtifactTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.INDETERMINATE), false)) .collect(Collectors.toList()); - return Stream.of(dataArtifactEvents, newTreeEvents) + List accountEvents = new ArrayList<>(); + for (Entry> entry : accountTypeDataSourceMap.entrySet()) { + Account.Type accountType = entry.getKey(); + for (Long dsObjId : entry.getValue()) { + AccountEvent newEvt = new AccountEvent(accountType, dsObjId); + accountEvents.add(newEvt); + } + } + + List newAccountTreeEvents = this.accountCounts.enqueueAll(accountEvents).stream() + .map(daoEvt -> new TreeEvent(createAccountTreeItem(daoEvt.getAccountType(), daoEvt.getDataSourceId(), TreeDisplayCount.INDETERMINATE), false)) + .collect(Collectors.toList()); + + return Stream.of(dataArtifactEvents, dataArtifactTreeEvents, accountEvents, newAccountTreeEvents) .flatMap((lst) -> lst.stream()) .collect(Collectors.toSet()); } - private TreeItemDTO createTreeItem(BlackboardArtifact.Type artifactType, Long dataSourceId, TreeDisplayCount displayCount) { + /** + * Invalidates items in the cache based on digest of events. + * + * @param accountTypeMap Maps account type to data source ids. + * @param artifactTypeDataSourceMap Maps artifact type to data source ids. 
+ */ + private void invalidateInCaches(Map> accountTypeMap, Map> artifactTypeDataSourceMap) { + // invalidate cache entries that are affected by events + ConcurrentMap, DataArtifactTableSearchResultsDTO> dataArtifactConcurrentMap = this.dataArtifactCache.asMap(); + dataArtifactConcurrentMap.forEach((k, v) -> { + Set dsIds = artifactTypeDataSourceMap.get(k.getParamData().getArtifactType()); + + if (dsIds != null) { + Long searchDsId = k.getParamData().getDataSourceId(); + if (searchDsId == null || dsIds.contains(searchDsId)) { + dataArtifactConcurrentMap.remove(k); + } + } + }); + + ConcurrentMap, DataArtifactTableSearchResultsDTO> accountConcurrentMap = this.accountCache.asMap(); + accountConcurrentMap.forEach((k, v) -> { + Set dsIds = accountTypeMap.get(((AccountSearchParams) k.getParamData()).getAccountType()); + if (dsIds != null) { + Long searchDsId = k.getParamData().getDataSourceId(); + if (searchDsId == null || dsIds.contains(searchDsId)) { + accountConcurrentMap.remove(k); + } + } + }); + } + + private TreeItemDTO createDataArtifactTreeItem(BlackboardArtifact.Type artifactType, Long dataSourceId, TreeDisplayCount displayCount) { return new TreeResultsDTO.TreeItemDTO<>( BlackboardArtifact.Category.DATA_ARTIFACT.name(), new DataArtifactSearchParam(artifactType, dataSourceId), @@ -374,17 +433,26 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { displayCount); } + private static TreeItemDTO createAccountTreeItem(Account.Type accountType, Long dataSourceId, TreeDisplayCount count) { + return new TreeItemDTO<>( + "ACCOUNTS", + new AccountSearchParams(accountType, dataSourceId), + accountType.getTypeName(), + accountType.getDisplayName(), + count); + } + @Override Set handleIngestComplete() { return this.treeCounts.flushEvents().stream() - .map(daoEvt -> new TreeEvent(createTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.UNSPECIFIED), true)) + .map(daoEvt -> new 
TreeEvent(createDataArtifactTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.UNSPECIFIED), true)) .collect(Collectors.toSet()); } @Override Set shouldRefreshTree() { return this.treeCounts.getEventTimeouts().stream() - .map(daoEvt -> new TreeEvent(createTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.UNSPECIFIED), true)) + .map(daoEvt -> new TreeEvent(createDataArtifactTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.UNSPECIFIED), true)) .collect(Collectors.toSet()); } @@ -436,7 +504,6 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { return MainDAO.getInstance().getDataArtifactsDAO(); } - @Override public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { return getDAO().getAccountsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize); @@ -444,8 +511,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { @Override public boolean isRefreshRequired(DAOEvent evt) { - // TODO - return false; + return getDAO().isAccountInvalidating(this.getParameters(), evt); } } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AccountEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AccountEvent.java new file mode 100644 index 0000000000..c9cec5071c --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AccountEvent.java @@ -0,0 +1,76 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel.events; + +import java.util.Objects; +import org.sleuthkit.datamodel.Account; +import org.sleuthkit.datamodel.BlackboardArtifact; + +/** + * Search parameters for accounts. + */ +public class AccountEvent extends DataArtifactEvent { + + private final Account.Type accountType; + + /** + * Main constructor. + * + * @param accountType The account type identifier. + * @param dataSourceId The data source id to filter on or null. + */ + public AccountEvent(Account.Type accountType, Long dataSourceId) { + super(BlackboardArtifact.Type.TSK_ACCOUNT, dataSourceId); + this.accountType = accountType; + } + + /** + * @return The account type identifier. 
+ */ + public Account.Type getAccountType() { + return accountType; + } + + @Override + public int hashCode() { + int hash = 7; + hash = 29 * hash + Objects.hashCode(this.accountType); + hash = 29 * hash + super.hashCode(); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final AccountEvent other = (AccountEvent) obj; + if (!Objects.equals(this.accountType, other.accountType)) { + return false; + } + return super.equals(obj); + } + +} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java index e586a4dfb6..fe7f33005f 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java @@ -32,6 +32,7 @@ import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO; import static org.sleuthkit.autopsy.mainui.nodes.TreeNode.getDefaultLookup; import org.sleuthkit.datamodel.Account; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; +import org.sleuthkit.autopsy.mainui.datamodel.events.AccountEvent; import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; import org.sleuthkit.datamodel.BlackboardArtifact; @@ -184,13 +185,21 @@ public class DataArtifactTypeFactory extends TreeChildFactory getOrCreateRelevantChild(TreeEvent treeEvt) { - // TODO + if (treeEvt.getItemRecord().getSearchParams() instanceof AccountEvent) { + @SuppressWarnings("unchecked") + TreeItemDTO originalTreeItem = (TreeItemDTO) treeEvt.getItemRecord(); + AccountSearchParams searchParam = originalTreeItem.getSearchParams(); + if (this.dataSourceId == null || Objects.equals(this.dataSourceId, searchParam.getDataSourceId())) { + return TreeChildFactory.createTreeItemDTO(originalTreeItem, + new 
AccountSearchParams(searchParam.getAccountType(), searchParam.getDataSourceId())); + } + } return null; } @Override public int compare(AccountSearchParams o1, AccountSearchParams o2) { - return 0; + return o1.getAccountType().getDisplayName().compareToIgnoreCase(o2.getAccountType().getDisplayName()); } } @@ -206,29 +215,29 @@ public class DataArtifactTypeFactory extends TreeChildFactory Date: Mon, 29 Nov 2021 21:56:54 -0500 Subject: [PATCH 114/142] fixes --- .../autopsy/mainui/nodes/DataArtifactTypeFactory.java | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java index fe7f33005f..49fdadc483 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java @@ -74,7 +74,12 @@ public class DataArtifactTypeFactory extends TreeChildFactory( + BlackboardArtifact.Category.DATA_ARTIFACT.name(), + new DataArtifactSearchParam(searchParam.getArtifactType(), searchParam.getDataSourceId()), + searchParam.getArtifactType().getTypeID(), + searchParam.getArtifactType().getDisplayName(), + originalTreeItem.getDisplayCount()); } } return null; @@ -190,7 +195,7 @@ public class DataArtifactTypeFactory extends TreeChildFactory originalTreeItem = (TreeItemDTO) treeEvt.getItemRecord(); AccountSearchParams searchParam = originalTreeItem.getSearchParams(); if (this.dataSourceId == null || Objects.equals(this.dataSourceId, searchParam.getDataSourceId())) { - return TreeChildFactory.createTreeItemDTO(originalTreeItem, + return TreeChildFactory.createTreeItemDTO(originalTreeItem, new AccountSearchParams(searchParam.getAccountType(), searchParam.getDataSourceId())); } } From 256f97a6fcc52699a950247e8d30fd77032f9620 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 30 Nov 2021 11:01:13 -0500 Subject: [PATCH 115/142] 
8202 ingest prog bars on EDT; job cancel off EDT --- .../src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java index 197b35ef2c..858d3b5bc5 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java @@ -704,7 +704,11 @@ final class IngestJobExecutor { currentDataSourceIngestPipeline = highPriorityDataSourceIngestPipeline; /* - * Schedule ingest tasks. + * Schedule ingest tasks. If only analyzing a subset of the files in + * the data source, the current assumption is that only file ingest + * task need to be scheduled. Data artifact ingest tasks will be + * scheduled as data artifacts produced by the file analysis are + * posted to the blackboard. */ if (!files.isEmpty() && hasFileIngestModules()) { taskScheduler.scheduleFileIngestTasks(this, files); From 097f7fdd4e58977de64426774957744b830b3719 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 30 Nov 2021 11:16:55 -0500 Subject: [PATCH 116/142] 8202 ingest prog bars on EDT; job cancel off EDT --- .../autopsy/ingest/IngestJobExecutor.java | 24 ++++++++++--------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java index 858d3b5bc5..9eeb220cc2 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java @@ -706,9 +706,9 @@ final class IngestJobExecutor { /* * Schedule ingest tasks. If only analyzing a subset of the files in * the data source, the current assumption is that only file ingest - * task need to be scheduled. 
Data artifact ingest tasks will be - * scheduled as data artifacts produced by the file analysis are - * posted to the blackboard. + * tasks for those files need to be scheduled. Data artifact ingest + * tasks will be scheduled as data artifacts produced by the file + * analysis are posted to the blackboard. */ if (!files.isEmpty() && hasFileIngestModules()) { taskScheduler.scheduleFileIngestTasks(this, files); @@ -719,10 +719,11 @@ final class IngestJobExecutor { /* * Check for stage completion. This is necessary because it is * possible that none of the tasks that were just scheduled will - * actually make it to task execution due to the file filter or - * other ingest job settings. In that case, there will never be a - * stage completion check in an ingest thread executing an ingest - * task, so such a job would run forever without a check here. + * actually make it to task execution, due to the file filter or + * other ingest job settings. If that happens, there will never be + * another stage completion check for this job in an ingest thread + * executing an ingest task, so such a job would run forever without + * a check here. */ checkForStageCompleted(); } @@ -769,8 +770,9 @@ final class IngestJobExecutor { * This constraint is implemented by restricting construction of * a streaming mode IngestJob to * IngestManager.openIngestStream(), which constructs and starts - * the job before returning the IngestStream that is used to - * stream in the files and data source. + * the job before returning the IngestStream. This means that + * the code in this method will run before addStreamedFiles() or + * addStreamedDataSource() can be called via the IngestStream. 
*/ taskScheduler.scheduleDataArtifactIngestTasks(this); } @@ -822,8 +824,8 @@ final class IngestJobExecutor { * time, and all of the file level and artifact ingest tasks * scheduled during the initial file streaming stage have * already been executed, there will never be a stage completion - * check in an ingest thread executing an ingest task, so such a - * job would run forever without a check here. + * check in an ingest thread executing an ingest task for this + * job, so such a job would run forever without a check here. */ checkForStageCompleted(); } From 5ed808b9b19a5fe5f90695b565b273b99b15b1c2 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Tue, 30 Nov 2021 12:08:27 -0500 Subject: [PATCH 117/142] improvements --- .../autopsy/mainui/datamodel/AbstractDAO.java | 49 +++++++++++++++++++ .../mainui/datamodel/DataArtifactDAO.java | 40 ++------------- .../mainui/nodes/DataArtifactTypeFactory.java | 41 ++++++++-------- .../mainui/nodes/TreeChildFactory.java | 23 +++++++++ 4 files changed, 98 insertions(+), 55 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java index a03b0a1528..3e14d9f443 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java @@ -18,9 +18,16 @@ */ package org.sleuthkit.autopsy.mainui.datamodel; +import com.google.common.cache.Cache; import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; import java.beans.PropertyChangeEvent; +import java.util.Collections; +import java.util.List; +import java.util.Map; import java.util.Set; +import java.util.concurrent.ConcurrentMap; +import java.util.function.Function; +import org.apache.commons.lang3.tuple.Pair; import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; /** @@ -63,4 +70,46 @@ abstract class AbstractDAO { * @return The categories that require a tree refresh. 
*/ abstract Set shouldRefreshTree(); + + /** + * Using a digest of event information, clears keys in a cache that may be + * effected by events. + * + * @param cache The cache. + * @param getKeys Using a key from a cache, provides a tuple + * of the relevant key in the data source + * mapping and the data source id (or null if + * no data source filtering). + * @param itemDataSourceMapping The event digest. + */ + void invalidateKeys(Cache, ?> cache, Function> getKeys, Map> itemDataSourceMapping) { + invalidateKeys(cache, getKeys, Collections.singletonList(itemDataSourceMapping)); + } + + /** + * Using a digest of event information, clears keys in a cache that may be + * effected by events. + * + * @param cache The cache. + * @param getKeys Using a key from a cache, provides a tuple + * of the relevant key in the data source + * mapping and the data source id (or null if + * no data source filtering). + * @param itemDataSourceMapping The list of event digests. + */ + void invalidateKeys(Cache, ?> cache, Function> getKeys, List>> itemDataSourceMapping) { + ConcurrentMap, ?> concurrentMap = cache.asMap(); + concurrentMap.forEach((k, v) -> { + Pair pairItems = getKeys.apply(k.getParamData()); + T searchParamsKey = pairItems.getLeft(); + Long searchParamsDsId = pairItems.getRight(); + for (Map> itemDsMapping : itemDataSourceMapping) { + Set dsIds = itemDsMapping.get(searchParamsKey); + if (dsIds != null && (searchParamsDsId == null || dsIds.contains(searchParamsDsId))) { + concurrentMap.remove(k); + } + } + }); + } + } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index aab4e2964d..fbcc1fa975 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -42,6 +42,7 @@ import org.sleuthkit.autopsy.coreutils.Logger; import java.util.concurrent.ExecutionException; import 
java.util.logging.Level; import java.util.stream.Stream; +import org.apache.commons.lang3.tuple.Pair; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeDisplayCount; @@ -83,7 +84,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { return BlackboardArtifactDAO.getIgnoredTreeTypes(); } - private final Cache, DataArtifactTableSearchResultsDTO> dataArtifactCache = CacheBuilder.newBuilder().maximumSize(1000).build(); + private final Cache, DataArtifactTableSearchResultsDTO> dataArtifactCache = CacheBuilder.newBuilder().maximumSize(1000).build(); private final Cache, DataArtifactTableSearchResultsDTO> accountCache = CacheBuilder.newBuilder().maximumSize(1000).build(); private final TreeCounts treeCounts = new TreeCounts<>(); private final TreeCounts accountCounts = new TreeCounts<>(); @@ -354,8 +355,9 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { return Collections.emptySet(); } - invalidateInCaches(accountTypeMap, artifactTypeDataSourceMap); - + super.invalidateKeys(this.dataArtifactCache, (sp) -> Pair.of(sp.getArtifactType(), sp.getDataSourceId()), artifactTypeDataSourceMap); + super.invalidateKeys(this.accountCache, (sp) -> Pair.of(sp.getAccountType(), sp.getDataSourceId()), accountTypeMap); + return getDAOEvts(accountTypeMap, artifactTypeDataSourceMap); } @@ -392,38 +394,6 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { .collect(Collectors.toSet()); } - /** - * Invalidates items in the cache based on digest of events. - * - * @param accountTypeMap Maps account type to data source ids. - * @param artifactTypeDataSourceMap Maps artifact type to data source ids. 
- */ - private void invalidateInCaches(Map> accountTypeMap, Map> artifactTypeDataSourceMap) { - // invalidate cache entries that are affected by events - ConcurrentMap, DataArtifactTableSearchResultsDTO> dataArtifactConcurrentMap = this.dataArtifactCache.asMap(); - dataArtifactConcurrentMap.forEach((k, v) -> { - Set dsIds = artifactTypeDataSourceMap.get(k.getParamData().getArtifactType()); - - if (dsIds != null) { - Long searchDsId = k.getParamData().getDataSourceId(); - if (searchDsId == null || dsIds.contains(searchDsId)) { - dataArtifactConcurrentMap.remove(k); - } - } - }); - - ConcurrentMap, DataArtifactTableSearchResultsDTO> accountConcurrentMap = this.accountCache.asMap(); - accountConcurrentMap.forEach((k, v) -> { - Set dsIds = accountTypeMap.get(((AccountSearchParams) k.getParamData()).getAccountType()); - if (dsIds != null) { - Long searchDsId = k.getParamData().getDataSourceId(); - if (searchDsId == null || dsIds.contains(searchDsId)) { - accountConcurrentMap.remove(k); - } - } - }); - } - private TreeItemDTO createDataArtifactTreeItem(BlackboardArtifact.Type artifactType, Long dataSourceId, TreeDisplayCount displayCount) { return new TreeResultsDTO.TreeItemDTO<>( BlackboardArtifact.Category.DATA_ARTIFACT.name(), diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java index 49fdadc483..4501c6baf2 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java @@ -32,7 +32,6 @@ import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO; import static org.sleuthkit.autopsy.mainui.nodes.TreeNode.getDefaultLookup; import org.sleuthkit.datamodel.Account; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; -import org.sleuthkit.autopsy.mainui.datamodel.events.AccountEvent; import 
org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; import org.sleuthkit.datamodel.BlackboardArtifact; @@ -68,19 +67,20 @@ public class DataArtifactTypeFactory extends TreeChildFactory getOrCreateRelevantChild(TreeEvent daoEvt) { - if (daoEvt.getItemRecord().getSearchParams() instanceof DataArtifactSearchParam) { - @SuppressWarnings("unchecked") - TreeItemDTO originalTreeItem = (TreeItemDTO) daoEvt.getItemRecord(); + + TreeItemDTO originalTreeItem = (TreeItemDTO) daoEvt.getItemRecord(); + + if (originalTreeItem != null + && !DataArtifactDAO.getIgnoredTreeTypes().contains(originalTreeItem.getSearchParams().getArtifactType()) + && (this.dataSourceId == null || Objects.equals(this.dataSourceId, originalTreeItem.getSearchParams().getDataSourceId()))) { + DataArtifactSearchParam searchParam = originalTreeItem.getSearchParams(); - if ((this.dataSourceId == null || Objects.equals(this.dataSourceId, searchParam.getDataSourceId())) - && !DataArtifactDAO.getIgnoredTreeTypes().contains(searchParam.getArtifactType())) { - return new TreeItemDTO<>( - BlackboardArtifact.Category.DATA_ARTIFACT.name(), - new DataArtifactSearchParam(searchParam.getArtifactType(), searchParam.getDataSourceId()), - searchParam.getArtifactType().getTypeID(), - searchParam.getArtifactType().getDisplayName(), - originalTreeItem.getDisplayCount()); - } + return new TreeItemDTO<>( + BlackboardArtifact.Category.DATA_ARTIFACT.name(), + new DataArtifactSearchParam(searchParam.getArtifactType(), searchParam.getDataSourceId()), + searchParam.getArtifactType().getTypeID(), + searchParam.getArtifactType().getDisplayName(), + originalTreeItem.getDisplayCount()); } return null; } @@ -190,15 +190,16 @@ public class DataArtifactTypeFactory extends TreeChildFactory getOrCreateRelevantChild(TreeEvent treeEvt) { - if (treeEvt.getItemRecord().getSearchParams() instanceof AccountEvent) { - @SuppressWarnings("unchecked") - TreeItemDTO originalTreeItem = (TreeItemDTO) treeEvt.getItemRecord(); + + TreeItemDTO 
originalTreeItem = getTypedTreeItem(treeEvt, AccountSearchParams.class); + + if (originalTreeItem != null + && (this.dataSourceId == null || Objects.equals(this.dataSourceId, originalTreeItem.getSearchParams().getDataSourceId()))) { AccountSearchParams searchParam = originalTreeItem.getSearchParams(); - if (this.dataSourceId == null || Objects.equals(this.dataSourceId, searchParam.getDataSourceId())) { - return TreeChildFactory.createTreeItemDTO(originalTreeItem, - new AccountSearchParams(searchParam.getAccountType(), searchParam.getDataSourceId())); - } + return TreeChildFactory.createTreeItemDTO(originalTreeItem, + new AccountSearchParams(searchParam.getAccountType(), searchParam.getDataSourceId())); } + return null; } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java index ed3dc1aafc..96e5232600 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java @@ -251,6 +251,29 @@ public abstract class TreeChildFactory extends ChildFactory.Detachable TreeItemDTO getTypedTreeItem(TreeEvent treeEvt, Class expectedSearchParamsType) { + if (treeEvt != null && treeEvt.getItemRecord() != null && treeEvt.getItemRecord().getSearchParams() != null + && expectedSearchParamsType.isAssignableFrom(treeEvt.getItemRecord().getSearchParams().getClass())) { + + @SuppressWarnings("unchecked") + TreeItemDTO originalTreeItem = (TreeItemDTO) treeEvt.getItemRecord(); + return originalTreeItem; + } + return null; + } + /** * Creates a TreeNode given the tree item data. 
* From ea16779d82212bfb16c14fec993d46e973cc4688 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 30 Nov 2021 12:24:06 -0500 Subject: [PATCH 118/142] 8202 ingest prog bars on EDT; job cancel off EDT --- .../AdHocSearchChildFactory.java | 47 ++-- .../keywordsearch/IngestSearchRunner.java | 201 ++++++++---------- .../autopsy/keywordsearch/QueryResults.java | 160 ++++++-------- 3 files changed, 193 insertions(+), 215 deletions(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AdHocSearchChildFactory.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AdHocSearchChildFactory.java index 4dfc014598..c01a81ff66 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AdHocSearchChildFactory.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AdHocSearchChildFactory.java @@ -31,15 +31,18 @@ import java.util.concurrent.ExecutionException; import java.util.logging.Level; import java.util.stream.Collectors; import java.util.stream.Stream; +import javax.swing.SwingUtilities; import javax.swing.SwingWorker; import org.netbeans.api.progress.ProgressHandle; import org.openide.nodes.ChildFactory; import org.openide.nodes.Children; import org.openide.nodes.Node; +import org.openide.util.Cancellable; import org.openide.util.NbBundle; import org.openide.util.lookup.Lookups; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; +import org.sleuthkit.autopsy.core.RuntimeProperties; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; import org.sleuthkit.autopsy.datamodel.AbstractAbstractFileNode; @@ -89,7 +92,8 @@ class AdHocSearchChildFactory extends ChildFactory { * Constructor * * @param queryRequests Query results - * @param saveResults Flag whether to save search results as KWS artifacts. + * @param saveResults Flag whether to save search results as KWS + * artifacts. 
*/ AdHocSearchChildFactory(Collection queryRequests, boolean saveResults) { this.queryRequests = queryRequests; @@ -129,7 +133,7 @@ class AdHocSearchChildFactory extends ChildFactory { createFlatKeys(queryRequest.getQuery(), toPopulate); } - + // If there were no hits, make a single Node that will display that // no results were found. if (toPopulate.isEmpty()) { @@ -176,7 +180,7 @@ class AdHocSearchChildFactory extends ChildFactory { * Get file properties. */ Map properties = new LinkedHashMap<>(); - + /** * Add a snippet property, if available. */ @@ -204,7 +208,6 @@ class AdHocSearchChildFactory extends ChildFactory { properties.put(LOCATION.toString(), contentName); } - String hitName; BlackboardArtifact artifact = null; if (hit.isArtifactHit()) { @@ -414,21 +417,35 @@ class AdHocSearchChildFactory extends ChildFactory { this.saveResults = saveResults; } - protected void finalizeWorker() { - deregisterWriter(this); - EventQueue.invokeLater(progress::finish); - } - @Override protected Void doInBackground() throws Exception { - registerWriter(this); //register (synchronized on class) outside of writerLock to prevent deadlock - final String queryStr = query.getQueryString(); - final String queryDisp = queryStr.length() > QUERY_DISPLAY_LEN ? queryStr.substring(0, QUERY_DISPLAY_LEN - 1) + " ..." : queryStr; try { - progress = ProgressHandle.createHandle(NbBundle.getMessage(this.getClass(), "KeywordSearchResultFactory.progress.saving", queryDisp), () -> BlackboardResultWriter.this.cancel(true)); - hits.process(progress, null, this, false, saveResults, null); + if (RuntimeProperties.runningWithGUI()) { + final String queryStr = query.getQueryString(); + final String queryDisp = queryStr.length() > QUERY_DISPLAY_LEN ? queryStr.substring(0, QUERY_DISPLAY_LEN - 1) + " ..." 
: queryStr; + SwingUtilities.invokeLater(() -> { + progress = ProgressHandle.createHandle( + NbBundle.getMessage(this.getClass(), "KeywordSearchResultFactory.progress.saving", queryDisp), + new Cancellable() { + @Override + public boolean cancel() { + //progress.setDisplayName(displayName + " " + NbBundle.getMessage(this.getClass(), "SearchRunner.doInBackGround.cancelMsg")); + logger.log(Level.INFO, "Ad hoc search cancelled by user"); //NON-NLS + new Thread(() -> { + BlackboardResultWriter.this.cancel(true); + }).start(); + return true; + } + }); + }); + } + registerWriter(this); //register (synchronized on class) outside of writerLock to prevent deadlock + hits.process(this, false, saveResults, null); } finally { - finalizeWorker(); + deregisterWriter(this); + if (RuntimeProperties.runningWithGUI() && progress != null) { + EventQueue.invokeLater(progress::finish); + } } return null; } diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java index 383abbd3af..8c7c0ffa0a 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java @@ -38,15 +38,16 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.logging.Level; import javax.swing.SwingUtilities; import javax.swing.SwingWorker; -import org.netbeans.api.progress.aggregate.AggregateProgressFactory; +import org.netbeans.api.progress.ProgressHandle; import org.netbeans.api.progress.aggregate.AggregateProgressHandle; -import org.netbeans.api.progress.aggregate.ProgressContributor; import org.openide.util.Cancellable; import org.openide.util.NbBundle; import org.openide.util.NbBundle.Messages; +import org.sleuthkit.autopsy.core.RuntimeProperties; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; import 
org.sleuthkit.autopsy.coreutils.StopWatch; +import org.sleuthkit.autopsy.coreutils.ThreadConfined; import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestMessage; import org.sleuthkit.autopsy.ingest.IngestServices; @@ -248,7 +249,8 @@ final class IngestSearchRunner { } /** - * Task to perform periodic searches for each job (does a single index commit first) + * Task to perform periodic searches for each job (does a single index + * commit first) */ private final class PeriodicSearchTask implements Runnable { @@ -296,24 +298,23 @@ final class IngestSearchRunner { NbBundle.getMessage(this.getClass(), "SearchRunner.Searcher.done.err.msg"), ex.getMessage())); }// catch and ignore if we were cancelled - catch (java.util.concurrent.CancellationException ex) { + catch (java.util.concurrent.CancellationException ex) { } } } stopWatch.stop(); logger.log(Level.INFO, "All periodic searches cumulatively took {0} secs", stopWatch.getElapsedTimeSecs()); //NON-NLS - + // calculate "hold off" time recalculateUpdateIntervalTime(stopWatch.getElapsedTimeSecs()); // ELDEBUG - + // schedule next PeriodicSearchTask jobProcessingTaskFuture = jobProcessingExecutor.schedule(new PeriodicSearchTask(), currentUpdateIntervalMs, MILLISECONDS); - + // exit this thread return; } - - + private void recalculateUpdateIntervalTime(long lastSerchTimeSec) { // If periodic search takes more than 1/4 of the current periodic search interval, then double the search interval if (lastSerchTimeSec * 1000 < currentUpdateIntervalMs / 4) { @@ -321,7 +322,7 @@ final class IngestSearchRunner { } // double the search interval currentUpdateIntervalMs = currentUpdateIntervalMs * 2; - logger.log(Level.WARNING, "Last periodic search took {0} sec. Increasing search interval to {1} sec", new Object[]{lastSerchTimeSec, currentUpdateIntervalMs/1000}); + logger.log(Level.WARNING, "Last periodic search took {0} sec. 
Increasing search interval to {1} sec", new Object[]{lastSerchTimeSec, currentUpdateIntervalMs / 1000}); return; } } @@ -447,13 +448,15 @@ final class IngestSearchRunner { /** * Searcher has private copies/snapshots of the lists and keywords */ - private SearchJobInfo job; - private List keywords; //keywords to search - private List keywordListNames; // lists currently being searched - private List keywordLists; - private Map keywordToList; //keyword to list name mapping + private final SearchJobInfo job; + private final List keywords; //keywords to search + private final List keywordListNames; // lists currently being searched + private final List keywordLists; + private final Map keywordToList; //keyword to list name mapping + private final boolean usingNetBeansGUI; + @ThreadConfined(type = ThreadConfined.ThreadType.AWT) + private ProgressHandle progressIndicator; private AggregateProgressHandle progressGroup; - private final Logger logger = Logger.getLogger(IngestSearchRunner.Searcher.class.getName()); private boolean finalRun = false; Searcher(SearchJobInfo job) { @@ -463,6 +466,7 @@ final class IngestSearchRunner { keywordToList = new HashMap<>(); keywordLists = new ArrayList<>(); //keywords are populated as searcher runs + usingNetBeansGUI = RuntimeProperties.runningWithGUI(); } Searcher(SearchJobInfo job, boolean finalRun) { @@ -473,76 +477,88 @@ final class IngestSearchRunner { @Override @Messages("SearchRunner.query.exception.msg=Error performing query:") protected Object doInBackground() throws Exception { - final String displayName = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.displayName") - + (finalRun ? 
(" - " + NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.finalizeMsg")) : ""); - final String pgDisplayName = displayName + (" (" + NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.pendingMsg") + ")"); - progressGroup = AggregateProgressFactory.createSystemHandle(pgDisplayName, null, new Cancellable() { - @Override - public boolean cancel() { - logger.log(Level.INFO, "Cancelling the searcher by user."); //NON-NLS - if (progressGroup != null) { - progressGroup.setDisplayName(displayName + " " + NbBundle.getMessage(this.getClass(), "SearchRunner.doInBackGround.cancelMsg")); - } - progressGroup.finish(); - return IngestSearchRunner.Searcher.this.cancel(true); - } - }, null); - - updateKeywords(); - - ProgressContributor[] subProgresses = new ProgressContributor[keywords.size()]; - int i = 0; - for (Keyword keywordQuery : keywords) { - subProgresses[i] = AggregateProgressFactory.createProgressContributor(keywordQuery.getSearchTerm()); - progressGroup.addContributor(subProgresses[i]); - i++; + if (usingNetBeansGUI) { + /* + * If running in the NetBeans thick client application version + * of Autopsy, NetBeans progress handles (i.e., progress bars) + * are used to display search progress in the lower right hand + * corner of the main application window. + * + * A layer of abstraction to allow alternate representations of + * progress could be used here, as it is in other places in the + * application (see implementations and usage of + * org.sleuthkit.autopsy.progress.ProgressIndicator interface), + * to better decouple keyword search from the application's + * presentation layer. + */ + final String displayName = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.displayName") + + (finalRun ? 
(" - " + NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.finalizeMsg")) : ""); + SwingUtilities.invokeLater(() -> { + progressIndicator = ProgressHandle.createHandle(displayName, new Cancellable() { + @Override + public boolean cancel() { + progressIndicator.setDisplayName(displayName + " " + NbBundle.getMessage(this.getClass(), "SearchRunner.doInBackGround.cancelMsg")); + logger.log(Level.INFO, "Search cancelled by user"); //NON-NLS + new Thread(() -> { + IngestSearchRunner.Searcher.this.cancel(true); + }).start(); + return true; + } + }); + progressIndicator.start(); + progressIndicator.switchToIndeterminate(); + }); } - progressGroup.start(); - final StopWatch stopWatch = new StopWatch(); stopWatch.start(); try { - progressGroup.setDisplayName(displayName); - - int keywordsSearched = 0; - + updateKeywords(); for (Keyword keyword : keywords) { - if (this.isCancelled() || this.job.getJobContext().fileIngestIsCancelled()) { - logger.log(Level.INFO, "Cancel detected, bailing before new keyword processed: {0}", keyword.getSearchTerm()); //NON-NLS + if (isCancelled() || job.getJobContext().fileIngestIsCancelled()) { + logger.log(Level.INFO, "Cancellation requested, exiting before new keyword processed: {0}", keyword.getSearchTerm()); //NON-NLS return null; } - final KeywordList keywordList = keywordToList.get(keyword); - - //new subProgress will be active after the initial query - //when we know number of hits to start() with - if (keywordsSearched > 0) { - subProgresses[keywordsSearched - 1].finish(); + KeywordList keywordList = keywordToList.get(keyword); + if (usingNetBeansGUI) { + String searchTermStr = keyword.getSearchTerm(); + if (searchTermStr.length() > 50) { + searchTermStr = searchTermStr.substring(0, 49) + "..."; + } else { + searchTermStr = searchTermStr; + } + final String progressMessage = keywordList.getName() + ": " + searchTermStr; + SwingUtilities.invokeLater(() -> { + progressIndicator.progress(progressMessage); + }); 
} - KeywordSearchQuery keywordSearchQuery = KeywordSearchUtil.getQueryForKeyword(keyword, keywordList); - // Filtering //limit search to currently ingested data sources //set up a filter with 1 or more image ids OR'ed - final KeywordQueryFilter dataSourceFilter = new KeywordQueryFilter(KeywordQueryFilter.FilterType.DATA_SOURCE, job.getDataSourceId()); + KeywordSearchQuery keywordSearchQuery = KeywordSearchUtil.getQueryForKeyword(keyword, keywordList); + KeywordQueryFilter dataSourceFilter = new KeywordQueryFilter(KeywordQueryFilter.FilterType.DATA_SOURCE, job.getDataSourceId()); keywordSearchQuery.addFilter(dataSourceFilter); - QueryResults queryResults; - // Do the actual search + QueryResults queryResults; try { queryResults = keywordSearchQuery.performQuery(); } catch (KeywordSearchModuleException | NoOpenCoreException ex) { logger.log(Level.SEVERE, "Error performing query: " + keyword.getSearchTerm(), ex); //NON-NLS - MessageNotifyUtil.Notify.error(Bundle.SearchRunner_query_exception_msg() + keyword.getSearchTerm(), ex.getCause().getMessage()); + if (usingNetBeansGUI) { + final String userMessage = Bundle.SearchRunner_query_exception_msg() + keyword.getSearchTerm(); + SwingUtilities.invokeLater(() -> { + MessageNotifyUtil.Notify.error(userMessage, ex.getCause().getMessage()); + }); + } //no reason to continue with next query if recovery failed //or wait for recovery to kick in and run again later //likely case has closed and threads are being interrupted return null; } catch (CancellationException e) { - logger.log(Level.INFO, "Cancel detected, bailing during keyword query: {0}", keyword.getSearchTerm()); //NON-NLS + logger.log(Level.INFO, "Cancellation requested, exiting during keyword query: {0}", keyword.getSearchTerm()); //NON-NLS return null; } @@ -551,42 +567,25 @@ final class IngestSearchRunner { QueryResults newResults = filterResults(queryResults); if (!newResults.getKeywords().isEmpty()) { - - // Write results to BB - //scale progress bar more more 
granular, per result sub-progress, within per keyword - int totalUnits = newResults.getKeywords().size(); - subProgresses[keywordsSearched].start(totalUnits); - int unitProgress = 0; - String queryDisplayStr = keyword.getSearchTerm(); - if (queryDisplayStr.length() > 50) { - queryDisplayStr = queryDisplayStr.substring(0, 49) + "..."; - } - subProgresses[keywordsSearched].progress(keywordList.getName() + ": " + queryDisplayStr, unitProgress); - // Create blackboard artifacts - newResults.process(null, subProgresses[keywordsSearched], this, keywordList.getIngestMessages(), true, job.getJobId()); - - } //if has results - - //reset the status text before it goes away - subProgresses[keywordsSearched].progress(""); - - ++keywordsSearched; - - } //for each keyword - - } //end try block - catch (Exception ex) { - logger.log(Level.WARNING, "searcher exception occurred", ex); //NON-NLS - } finally { - try { - finalizeSearcher(); - stopWatch.stop(); - logger.log(Level.INFO, "Searcher took {0} secs to run (final = {1})", new Object[]{stopWatch.getElapsedTimeSecs(), this.finalRun}); //NON-NLS - } finally { - // In case a thread is waiting on this worker to be done - job.searchNotify(); + newResults.process(this, keywordList.getIngestMessages(), true, job.getJobId()); + } } + } catch (Exception ex) { + logger.log(Level.WARNING, "Error occurred during keyword search", ex); //NON-NLS + } finally { + if (progressGroup != null) { + SwingUtilities.invokeLater(new Runnable() { + @Override + public void run() { + progressGroup.finish(); + } + }); + } + stopWatch.stop(); + logger.log(Level.INFO, "Searcher took {0} secs to run (final = {1})", new Object[]{stopWatch.getElapsedTimeSecs(), this.finalRun}); //NON-NLS + // In case a thread is waiting on this worker to be done + job.searchNotify(); } return null; @@ -612,20 +611,6 @@ final class IngestSearchRunner { } } - /** - * Performs the cleanup that needs to be done right AFTER - * doInBackground() returns without relying on done() 
method that is not - * guaranteed to run. - */ - private void finalizeSearcher() { - SwingUtilities.invokeLater(new Runnable() { - @Override - public void run() { - progressGroup.finish(); - } - }); - } - /** * This method filters out all of the hits found in earlier periodic * searches and returns only the results found by the most recent diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java index 40ed7db43d..d58f9c1a5c 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java @@ -25,6 +25,7 @@ import java.util.List; import java.util.Map; import java.util.Set; import java.util.logging.Level; +import javax.swing.SwingUtilities; import javax.swing.SwingWorker; import org.apache.commons.lang.StringUtils; import org.netbeans.api.progress.ProgressHandle; @@ -32,6 +33,7 @@ import org.netbeans.api.progress.aggregate.ProgressContributor; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; +import org.sleuthkit.autopsy.core.RuntimeProperties; import org.sleuthkit.autopsy.coreutils.EscapeUtil; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.IngestMessage; @@ -51,6 +53,8 @@ import org.sleuthkit.datamodel.TskCoreException; * about the search hits to the ingest inbox, and publishing an event to notify * subscribers of the blackboard posts. */ + + class QueryResults { private static final Logger logger = Logger.getLogger(QueryResults.class.getName()); @@ -131,10 +135,6 @@ class QueryResults { * All calls to the addResult method MUST be completed before calling this * method. * - * @param progress A progress indicator that reports the number of - * keywords processed. Can be null. 
- * @param subProgress A progress contributor that reports the keyword - * currently being processed. Can be null. * @param worker The SwingWorker that is being used to do the * processing, will be checked for task cancellation * before processing each keyword. @@ -145,19 +145,7 @@ class QueryResults { * @param ingestJobId The numeric identifier of the ingest job within which * the artifacts are being created, may be null. */ - void process(ProgressHandle progress, ProgressContributor subProgress, SwingWorker worker, boolean notifyInbox, boolean saveResults, Long ingestJobId) { - /* - * Initialize the progress indicator to the number of keywords that will - * be processed. - */ - if (null != progress) { - progress.start(getKeywords().size()); - } - - /* - * Process the keyword hits for each keyword. - */ - int keywordsProcessed = 0; + void process(SwingWorker worker, boolean notifyInbox, boolean saveResults, Long ingestJobId) { final Collection hitArtifacts = new ArrayList<>(); for (final Keyword keyword : getKeywords()) { /* @@ -165,22 +153,7 @@ class QueryResults { */ if (worker.isCancelled()) { logger.log(Level.INFO, "Processing cancelled, exiting before processing search term {0}", keyword.getSearchTerm()); //NON-NLS - break; - } - - /* - * Update the progress indicator and the show the current keyword - * via the progress contributor. 
- */ - if (progress != null) { - progress.progress(keyword.toString(), keywordsProcessed); - } - if (subProgress != null) { - String hitDisplayStr = keyword.getSearchTerm(); - if (hitDisplayStr.length() > 50) { - hitDisplayStr = hitDisplayStr.substring(0, 49) + "..."; - } - subProgress.progress(query.getKeywordList().getName() + ": " + hitDisplayStr, keywordsProcessed); + return; } /* @@ -202,7 +175,7 @@ class QueryResults { snippet = LuceneQuery.querySnippet(snippetQuery, hit.getSolrObjectId(), hit.getChunkId(), !query.isLiteral(), true); } catch (NoOpenCoreException e) { logger.log(Level.SEVERE, "Solr core closed while executing snippet query " + snippetQuery, e); //NON-NLS - break; // Stop processing. + return; // Stop processing. } catch (Exception e) { logger.log(Level.SEVERE, "Error executing snippet query " + snippetQuery, e); //NON-NLS continue; // Try processing the next hit. @@ -242,8 +215,6 @@ class QueryResults { } } } - - ++keywordsProcessed; } /* @@ -298,69 +269,74 @@ class QueryResults { * @throws TskCoreException If there is a problem generating or send the * inbox message. 
*/ - private void writeSingleFileInboxMessage(BlackboardArtifact artifact, Content hitContent) throws TskCoreException { - StringBuilder subjectSb = new StringBuilder(1024); - if (!query.isLiteral()) { - subjectSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.regExpHitLbl")); - } else { - subjectSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.kwHitLbl")); - } + private void writeSingleFileInboxMessage(final BlackboardArtifact artifact, final Content hitContent) throws TskCoreException { + if (artifact != null && hitContent != null && RuntimeProperties.runningWithGUI()) { + final StringBuilder subjectSb = new StringBuilder(1024); + if (!query.isLiteral()) { + subjectSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.regExpHitLbl")); + } else { + subjectSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.kwHitLbl")); + } - StringBuilder detailsSb = new StringBuilder(1024); - String uniqueKey = null; - BlackboardAttribute attr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD)); - if (attr != null) { - final String keyword = attr.getValueString(); - subjectSb.append(keyword); - uniqueKey = keyword.toLowerCase(); - detailsSb.append(""); //NON-NLS - detailsSb.append(""); //NON-NLS - detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.kwHitThLbl")); - detailsSb.append(""); //NON-NLS - detailsSb.append(""); //NON-NLS - } + final StringBuilder detailsSb = new StringBuilder(1024); + String uniqueKey = null; + BlackboardAttribute attr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD)); + if (attr != null) { + final String keyword = attr.getValueString(); + subjectSb.append(keyword); + uniqueKey = keyword.toLowerCase(); + detailsSb.append("
").append(EscapeUtil.escapeHtml(keyword)).append("
"); //NON-NLS + detailsSb.append(""); //NON-NLS + detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.kwHitThLbl")); + detailsSb.append(""); //NON-NLS + detailsSb.append(""); //NON-NLS + } - //preview - attr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW)); - if (attr != null) { - detailsSb.append(""); //NON-NLS - detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.previewThLbl")); - detailsSb.append(""); //NON-NLS - detailsSb.append(""); //NON-NLS - } - - //file - detailsSb.append(""); //NON-NLS - detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.fileThLbl")); - if (hitContent instanceof AbstractFile) { - AbstractFile hitFile = (AbstractFile) hitContent; - detailsSb.append(""); //NON-NLS - } else { - detailsSb.append(""); //NON-NLS - } - detailsSb.append(""); //NON-NLS - - //list - attr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME)); - if (attr != null) { - detailsSb.append(""); //NON-NLS - detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.listThLbl")); - detailsSb.append(""); //NON-NLS - detailsSb.append(""); //NON-NLS - } - - //regex - if (!query.isLiteral()) { - attr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP)); + //preview + attr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW)); if (attr != null) { detailsSb.append(""); //NON-NLS - detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.regExThLbl")); + detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.previewThLbl")); + detailsSb.append(""); //NON-NLS + detailsSb.append(""); //NON-NLS + } + + //file + detailsSb.append(""); //NON-NLS + detailsSb.append(NbBundle.getMessage(this.getClass(), 
"KeywordSearchIngestModule.fileThLbl")); + if (hitContent instanceof AbstractFile) { + AbstractFile hitFile = (AbstractFile) hitContent; + detailsSb.append(""); //NON-NLS + } else { + detailsSb.append(""); //NON-NLS + } + detailsSb.append(""); //NON-NLS + + //list + attr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME)); + if (attr != null) { + detailsSb.append(""); //NON-NLS + detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.listThLbl")); detailsSb.append(""); //NON-NLS detailsSb.append(""); //NON-NLS } - } - detailsSb.append("
").append(EscapeUtil.escapeHtml(keyword)).append("
").append(EscapeUtil.escapeHtml(attr.getValueString())).append("
").append(hitFile.getParentPath()).append(hitFile.getName()).append("").append(hitContent.getName()).append("
").append(attr.getValueString()).append("
").append(EscapeUtil.escapeHtml(attr.getValueString())).append("
").append(hitFile.getParentPath()).append(hitFile.getName()).append("").append(hitContent.getName()).append("
").append(attr.getValueString()).append("
"); //NON-NLS - IngestServices.getInstance().postMessage(IngestMessage.createDataMessage(MODULE_NAME, subjectSb.toString(), detailsSb.toString(), uniqueKey, artifact)); + //regex + if (!query.isLiteral()) { + attr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP)); + if (attr != null) { + detailsSb.append(""); //NON-NLS + detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.regExThLbl")); + detailsSb.append("").append(attr.getValueString()).append(""); //NON-NLS + detailsSb.append(""); //NON-NLS + } + } + detailsSb.append(""); //NON-NLS + + String key = uniqueKey; // RC: Might be null, does this work? + SwingUtilities.invokeLater(() -> { + IngestServices.getInstance().postMessage(IngestMessage.createDataMessage(MODULE_NAME, subjectSb.toString(), detailsSb.toString(), key, artifact)); + }); + } } } From 65392209d3b3a988751a0bba415c6487feaf55ee Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 30 Nov 2021 13:21:47 -0500 Subject: [PATCH 119/142] 8202 KWS cancellation out of EDT, progress ops in EDT --- .../keywordsearch/IngestSearchRunner.java | 75 +++++++++---------- 1 file changed, 36 insertions(+), 39 deletions(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java index 8c7c0ffa0a..1174e6fc05 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java @@ -39,7 +39,6 @@ import java.util.logging.Level; import javax.swing.SwingUtilities; import javax.swing.SwingWorker; import org.netbeans.api.progress.ProgressHandle; -import org.netbeans.api.progress.aggregate.AggregateProgressHandle; import org.openide.util.Cancellable; import org.openide.util.NbBundle; import org.openide.util.NbBundle.Messages; @@ -456,7 +455,6 @@ final class 
IngestSearchRunner { private final boolean usingNetBeansGUI; @ThreadConfined(type = ThreadConfined.ThreadType.AWT) private ProgressHandle progressIndicator; - private AggregateProgressHandle progressGroup; private boolean finalRun = false; Searcher(SearchJobInfo job) { @@ -477,42 +475,42 @@ final class IngestSearchRunner { @Override @Messages("SearchRunner.query.exception.msg=Error performing query:") protected Object doInBackground() throws Exception { - if (usingNetBeansGUI) { - /* - * If running in the NetBeans thick client application version - * of Autopsy, NetBeans progress handles (i.e., progress bars) - * are used to display search progress in the lower right hand - * corner of the main application window. - * - * A layer of abstraction to allow alternate representations of - * progress could be used here, as it is in other places in the - * application (see implementations and usage of - * org.sleuthkit.autopsy.progress.ProgressIndicator interface), - * to better decouple keyword search from the application's - * presentation layer. - */ - final String displayName = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.displayName") - + (finalRun ? 
(" - " + NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.finalizeMsg")) : ""); - SwingUtilities.invokeLater(() -> { - progressIndicator = ProgressHandle.createHandle(displayName, new Cancellable() { - @Override - public boolean cancel() { - progressIndicator.setDisplayName(displayName + " " + NbBundle.getMessage(this.getClass(), "SearchRunner.doInBackGround.cancelMsg")); - logger.log(Level.INFO, "Search cancelled by user"); //NON-NLS - new Thread(() -> { - IngestSearchRunner.Searcher.this.cancel(true); - }).start(); - return true; - } - }); - progressIndicator.start(); - progressIndicator.switchToIndeterminate(); - }); - } - final StopWatch stopWatch = new StopWatch(); stopWatch.start(); try { + if (usingNetBeansGUI) { + /* + * If running in the NetBeans thick client application + * version of Autopsy, NetBeans progress handles (i.e., + * progress bars) are used to display search progress in the + * lower right hand corner of the main application window. + * + * A layer of abstraction to allow alternate representations + * of progress could be used here, as it is in other places + * in the application (see implementations and usage of + * org.sleuthkit.autopsy.progress.ProgressIndicator + * interface), to better decouple keyword search from the + * application's presentation layer. + */ + final String displayName = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.displayName") + + (finalRun ? 
(" - " + NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.finalizeMsg")) : ""); + SwingUtilities.invokeAndWait(() -> { + progressIndicator = ProgressHandle.createHandle(displayName, new Cancellable() { + @Override + public boolean cancel() { + progressIndicator.setDisplayName(displayName + " " + NbBundle.getMessage(this.getClass(), "SearchRunner.doInBackGround.cancelMsg")); + logger.log(Level.INFO, "Search cancelled by user"); //NON-NLS + new Thread(() -> { + IngestSearchRunner.Searcher.this.cancel(true); + }).start(); + return true; + } + }); + progressIndicator.start(); + progressIndicator.switchToIndeterminate(); + }); + } + updateKeywords(); for (Keyword keyword : keywords) { if (isCancelled() || job.getJobContext().fileIngestIsCancelled()) { @@ -525,8 +523,6 @@ final class IngestSearchRunner { String searchTermStr = keyword.getSearchTerm(); if (searchTermStr.length() > 50) { searchTermStr = searchTermStr.substring(0, 49) + "..."; - } else { - searchTermStr = searchTermStr; } final String progressMessage = keywordList.getName() + ": " + searchTermStr; SwingUtilities.invokeLater(() -> { @@ -574,11 +570,12 @@ final class IngestSearchRunner { } catch (Exception ex) { logger.log(Level.WARNING, "Error occurred during keyword search", ex); //NON-NLS } finally { - if (progressGroup != null) { + if (progressIndicator != null) { SwingUtilities.invokeLater(new Runnable() { @Override public void run() { - progressGroup.finish(); + progressIndicator.finish(); + progressIndicator = null; } }); } From 2b999eefc6455fd88b767410ae57226deddfa833 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Tue, 30 Nov 2021 13:38:40 -0500 Subject: [PATCH 120/142] commenting --- .../autopsy/mainui/datamodel/AbstractDAO.java | 30 +++++++++++++++++-- .../mainui/datamodel/DataArtifactDAO.java | 21 ++++++++++--- .../mainui/nodes/DataArtifactTypeFactory.java | 4 +-- 3 files changed, 47 insertions(+), 8 deletions(-) diff --git 
a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java index 3e14d9f443..48be21cf40 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java @@ -27,7 +27,10 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentMap; import java.util.function.Function; +import java.util.stream.Collectors; import org.apache.commons.lang3.tuple.Pair; +import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeCounts; import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; /** @@ -82,7 +85,7 @@ abstract class AbstractDAO { * no data source filtering). * @param itemDataSourceMapping The event digest. */ - void invalidateKeys(Cache, ?> cache, Function> getKeys, Map> itemDataSourceMapping) { + static void invalidateKeys(Cache, ?> cache, Function> getKeys, Map> itemDataSourceMapping) { invalidateKeys(cache, getKeys, Collections.singletonList(itemDataSourceMapping)); } @@ -97,7 +100,7 @@ abstract class AbstractDAO { * no data source filtering). * @param itemDataSourceMapping The list of event digests. */ - void invalidateKeys(Cache, ?> cache, Function> getKeys, List>> itemDataSourceMapping) { + static void invalidateKeys(Cache, ?> cache, Function> getKeys, List>> itemDataSourceMapping) { ConcurrentMap, ?> concurrentMap = cache.asMap(); concurrentMap.forEach((k, v) -> { Pair pairItems = getKeys.apply(k.getParamData()); @@ -112,4 +115,27 @@ abstract class AbstractDAO { }); } + /** + * Returns a set of tree events gathered from the TreeCounts instance after calling flushEvents. + * @param treeCounts The tree counts instance. + * @param converter The means of acquiring a tree item dto to be placed in the TreeEvent. + * @return The generated tree events. 
+ */ + static Set getIngestCompleteEvents(TreeCounts treeCounts, Function> converter) { + return treeCounts.flushEvents().stream() + .map(daoEvt -> new TreeEvent(converter.apply(daoEvt), true)) + .collect(Collectors.toSet()); + } + + /** + * Returns a set of tree events gathered from the TreeCounts instance after calling getEventTimeouts. + * @param treeCounts The tree counts instance. + * @param converter The means of acquiring a tree item dto to be placed in the TreeEvent. + * @return The generated tree events. + */ + static Set getRefreshEvents(TreeCounts treeCounts, Function> converter) { + return treeCounts.getEventTimeouts().stream() + .map(daoEvt -> new TreeEvent(converter.apply(daoEvt), true)) + .collect(Collectors.toSet()); + } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index fbcc1fa975..8980bbdbf1 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -414,15 +414,28 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { @Override Set handleIngestComplete() { - return this.treeCounts.flushEvents().stream() - .map(daoEvt -> new TreeEvent(createDataArtifactTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.UNSPECIFIED), true)) + Set daoEvents = getIngestCompleteEvents(this.treeCounts, + (daoEvt) -> createDataArtifactTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.UNSPECIFIED)); + + Set accountDaoEvents = + getIngestCompleteEvents(this.accountCounts, + (daoEvt) -> createAccountTreeItem(daoEvt.getAccountType(), daoEvt.getDataSourceId(), TreeDisplayCount.UNSPECIFIED)); + + return Stream.of(daoEvents, accountDaoEvents) + .flatMap(s -> s.stream()) .collect(Collectors.toSet()); } @Override Set shouldRefreshTree() { - return this.treeCounts.getEventTimeouts().stream() - .map(daoEvt -> 
new TreeEvent(createDataArtifactTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.UNSPECIFIED), true)) + Set daoEvents = getRefreshEvents(this.treeCounts, + (daoEvt) -> createDataArtifactTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.UNSPECIFIED)); + + Set accountDaoEvents = getRefreshEvents(this.accountCounts, + (daoEvt) -> createAccountTreeItem(daoEvt.getAccountType(), daoEvt.getDataSourceId(), TreeDisplayCount.UNSPECIFIED)); + + return Stream.of(daoEvents, accountDaoEvents) + .flatMap(s -> s.stream()) .collect(Collectors.toSet()); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java index 4501c6baf2..7496d24614 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java @@ -66,9 +66,9 @@ public class DataArtifactTypeFactory extends TreeChildFactory getOrCreateRelevantChild(TreeEvent daoEvt) { + protected TreeItemDTO getOrCreateRelevantChild(TreeEvent treeEvt) { - TreeItemDTO originalTreeItem = (TreeItemDTO) daoEvt.getItemRecord(); + TreeItemDTO originalTreeItem = super.getTypedTreeItem(treeEvt, DataArtifactSearchParam.class); if (originalTreeItem != null && !DataArtifactDAO.getIgnoredTreeTypes().contains(originalTreeItem.getSearchParams().getArtifactType()) From 1dd354e5924494d862b0cf3a46a721b010ad2895 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Tue, 30 Nov 2021 13:59:15 -0500 Subject: [PATCH 121/142] ordering fix --- .../autopsy/mainui/nodes/DataArtifactTypeFactory.java | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java index 7496d24614..fba0e9560e 100644 --- 
a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java @@ -87,7 +87,15 @@ public class DataArtifactTypeFactory extends TreeChildFactory Date: Tue, 30 Nov 2021 16:06:40 -0500 Subject: [PATCH 122/142] merging CommAccountsDAO --- .../corecomponents/DataResultPanel.java | 12 +- .../autopsy/mainui/datamodel/AbstractDAO.java | 6 +- .../mainui/datamodel/AccountSearchParams.java | 78 ------ .../mainui/datamodel/CommAccountsDAO.java | 182 +++++++++---- .../datamodel/CommAccountsSearchParams.java | 4 +- .../mainui/datamodel/DataArtifactDAO.java | 250 +----------------- .../mainui/datamodel/events/AccountEvent.java | 76 ------ .../datamodel/events/CommAccountsEvent.java | 42 ++- .../mainui/nodes/DataArtifactTypeFactory.java | 30 +-- 9 files changed, 194 insertions(+), 486 deletions(-) delete mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/AccountSearchParams.java delete mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AccountEvent.java diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java index 0beb205624..2f37450d2f 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java @@ -62,13 +62,13 @@ import org.sleuthkit.autopsy.datamodel.BaseChildFactory.PageCountChangeEvent; import org.sleuthkit.autopsy.datamodel.BaseChildFactory.PageSizeChangeEvent; import org.sleuthkit.autopsy.datamodel.NodeSelectionInfo; import org.sleuthkit.autopsy.ingest.IngestManager; -import org.sleuthkit.autopsy.mainui.datamodel.AccountSearchParams; +import org.sleuthkit.autopsy.mainui.datamodel.CommAccountsSearchParams; import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultDAO.AnalysisResultFetcher; import 
org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultDAO.AnalysisResultSetFetcher; import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultDAO.KeywordHitResultFetcher; import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSetSearchParam; -import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactDAO.DataArtifactAccountFetcher; +import org.sleuthkit.autopsy.mainui.datamodel.CommAccountsDAO.CommAccountFetcher; import org.sleuthkit.autopsy.mainui.datamodel.events.DAOAggregateEvent; import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactDAO.DataArtifactFetcher; import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactSearchParam; @@ -1175,17 +1175,17 @@ public class DataResultPanel extends javax.swing.JPanel implements DataResult, C * * @param accountParams The search parameter query. */ - void displayAccounts(AccountSearchParams accountParams) { + void displayAccounts(CommAccountsSearchParams accountParams) { try { - this.searchResultManager = new SearchManager(new DataArtifactAccountFetcher(accountParams), getPageSize()); + this.searchResultManager = new SearchManager(new CommAccountFetcher(accountParams), getPageSize()); SearchResultsDTO results = searchResultManager.getResults(); displaySearchResults(results, true); } catch (ExecutionException ex) { logger.log(Level.WARNING, MessageFormat.format("There was an error displaying search results for [artifact type: {0}, data source id: {1}, account type: {2}]", - accountParams.getArtifactType(), + accountParams.getType(), accountParams.getDataSourceId() == null ? 
"" : accountParams.getDataSourceId(), - accountParams.getAccountType()), + accountParams.getType()), ex); } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java index 48be21cf40..9172770c47 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java @@ -54,7 +54,7 @@ abstract class AbstractDAO { * @return The list of DAOEvents that should be broadcasted to the views or * an empty list if the Autopsy events are irrelevant to this DAO. */ - abstract Set processEvent(PropertyChangeEvent evt); + abstract Set processEvent(PropertyChangeEvent evt); /** * Handles the ingest complete or cancelled event. Any events that are @@ -62,7 +62,7 @@ abstract class AbstractDAO { * * @return The flushed events that were delayed and batched. */ - abstract Set handleIngestComplete(); + abstract Set handleIngestComplete(); /** * Returns any categories that require a tree refresh. For instance, if web @@ -72,7 +72,7 @@ abstract class AbstractDAO { * * @return The categories that require a tree refresh. */ - abstract Set shouldRefreshTree(); + abstract Set shouldRefreshTree(); /** * Using a digest of event information, clears keys in a cache that may be diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AccountSearchParams.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AccountSearchParams.java deleted file mode 100644 index b9959f80c5..0000000000 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AccountSearchParams.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2021 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.mainui.datamodel; - -import java.util.Objects; -import org.sleuthkit.datamodel.Account; -import org.sleuthkit.datamodel.BlackboardArtifact; - -/** - * Search parameters for accounts. - */ -public class AccountSearchParams extends DataArtifactSearchParam { - - private final Account.Type accountType; - - /** - * Main constructor. - * - * @param accountType The account type identifier. - * @param dataSourceId The data source id to filter on or null. - */ - public AccountSearchParams(Account.Type accountType, Long dataSourceId) { - super(BlackboardArtifact.Type.TSK_ACCOUNT, dataSourceId); - this.accountType = accountType; - } - - /** - * @return The account type identifier. 
- */ - public Account.Type getAccountType() { - return accountType; - } - - @Override - public int hashCode() { - int hash = 7; - hash = 29 * hash + Objects.hashCode(this.accountType); - hash = 29 * hash + super.hashCode(); - return hash; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - final AccountSearchParams other = (AccountSearchParams) obj; - if (!Objects.equals(this.accountType, other.accountType)) { - return false; - } - return super.equals(obj); - } - - - -} \ No newline at end of file diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java index a0a7833441..dec8ec6f35 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java @@ -22,29 +22,33 @@ import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import java.beans.PropertyChangeEvent; import java.sql.SQLException; -import java.util.Collection; +import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; -import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.logging.Level; import java.util.stream.Collectors; import java.util.stream.Stream; +import org.apache.commons.lang3.tuple.Pair; import org.openide.util.NbBundle; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.ingest.IngestManager; import 
org.sleuthkit.autopsy.ingest.ModuleDataEvent; +import static org.sleuthkit.autopsy.mainui.datamodel.AbstractDAO.getIngestCompleteEvents; +import static org.sleuthkit.autopsy.mainui.datamodel.AbstractDAO.getRefreshEvents; import org.sleuthkit.autopsy.mainui.datamodel.events.CommAccountsEvent; import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; +import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventUtils; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeCounts; import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; import org.sleuthkit.datamodel.Account; @@ -65,7 +69,8 @@ public class CommAccountsDAO extends AbstractDAO { private static final int CACHE_SIZE = Account.Type.PREDEFINED_ACCOUNT_TYPES.size(); // number of cached SearchParams sub-types private static final long CACHE_DURATION = 2; private static final TimeUnit CACHE_DURATION_UNITS = TimeUnit.MINUTES; - private final Cache, SearchResultsDTO> searchParamsCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).expireAfterAccess(CACHE_DURATION, CACHE_DURATION_UNITS).build(); + private final Cache, SearchResultsDTO> searchParamsCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).expireAfterAccess(CACHE_DURATION, CACHE_DURATION_UNITS).build(); + private final TreeCounts accountCounts = new TreeCounts<>(); private static CommAccountsDAO instance = null; @@ -77,6 +82,10 @@ public class CommAccountsDAO extends AbstractDAO { return instance; } + SleuthkitCase getCase() throws NoCurrentCaseException { + return Case.getCurrentCaseThrows().getSleuthkitCase(); + } + public SearchResultsDTO getCommAcounts(CommAccountsSearchParams key, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException { if (key.getType() == null) { throw new IllegalArgumentException("Must have non-null type"); @@ -150,66 +159,140 @@ public class CommAccountsDAO extends AbstractDAO { return new 
DataArtifactTableSearchResultsDTO(BlackboardArtifact.Type.TSK_ACCOUNT, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), allArtifacts.size()); } - @Override - void clearCaches() { - this.searchParamsCache.invalidateAll();; + private static TreeResultsDTO.TreeItemDTO createAccountTreeItem(Account.Type accountType, Long dataSourceId, TreeResultsDTO.TreeDisplayCount count) { + return new TreeResultsDTO.TreeItemDTO<>( + "ACCOUNTS", + new CommAccountsSearchParams(accountType, dataSourceId), + accountType.getTypeName(), + accountType.getDisplayName(), + count); + } + + /** + * Returns the accounts and their counts in the current data source if a + * data source id is provided or all accounts if data source id is null. + * + * @param dataSourceId The data source id or null for no data source filter. + * + * @return The results. + * + * @throws ExecutionException + */ + public TreeResultsDTO getAccountsCounts(Long dataSourceId) throws ExecutionException { + String query = "res.account_type AS account_type, MIN(res.account_display_name) AS account_display_name, COUNT(*) AS count\n" + + "FROM (\n" + + " SELECT MIN(account_types.type_name) AS account_type, MIN(account_types.display_name) AS account_display_name\n" + + " FROM blackboard_artifacts\n" + + " LEFT JOIN blackboard_attributes ON blackboard_artifacts.artifact_id = blackboard_attributes.artifact_id\n" + + " LEFT JOIN account_types ON blackboard_attributes.value_text = account_types.type_name\n" + + " WHERE blackboard_artifacts.artifact_type_id = " + BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID() + "\n" + + " AND blackboard_attributes.attribute_type_id = " + BlackboardAttribute.Type.TSK_ACCOUNT_TYPE.getTypeID() + "\n" + + (dataSourceId != null && dataSourceId > 0 ? 
" AND blackboard_artifacts.data_source_obj_id = " + dataSourceId + " " : " ") + "\n" + + " -- group by artifact_id to ensure only one account type per artifact\n" + + " GROUP BY blackboard_artifacts.artifact_id\n" + + ") res\n" + + "GROUP BY res.account_type\n" + + "ORDER BY MIN(res.account_display_name)"; + + List> accountParams = new ArrayList<>(); + try { + getCase().getCaseDbAccessManager().select(query, (resultSet) -> { + try { + while (resultSet.next()) { + String accountTypeName = resultSet.getString("account_type"); + String accountDisplayName = resultSet.getString("account_display_name"); + Account.Type accountType = new Account.Type(accountTypeName, accountDisplayName); + long count = resultSet.getLong("count"); + accountParams.add(createAccountTreeItem(accountType, dataSourceId, TreeResultsDTO.TreeDisplayCount.getDeterminate(count))); + } + } catch (SQLException ex) { + logger.log(Level.WARNING, "An error occurred while fetching artifact type counts.", ex); + } + }); + + // return results + return new TreeResultsDTO<>(accountParams); + + } catch (NoCurrentCaseException | TskCoreException ex) { + throw new ExecutionException("An error occurred while fetching data artifact counts.", ex); + } } @Override - Set handleIngestComplete() { - // GVDTODO - return Collections.emptySet(); + void clearCaches() { + this.searchParamsCache.invalidateAll(); + this.handleIngestComplete(); + } + + @Override + Set handleIngestComplete() { + return getIngestCompleteEvents( + this.accountCounts, + (daoEvt) -> createAccountTreeItem(daoEvt.getAccountType(), daoEvt.getDataSourceId(), TreeResultsDTO.TreeDisplayCount.UNSPECIFIED) + ); } @Override Set shouldRefreshTree() { - // GVDTODO - return Collections.emptySet(); + return getRefreshEvents( + this.accountCounts, + (daoEvt) -> createAccountTreeItem(daoEvt.getAccountType(), daoEvt.getDataSourceId(), TreeResultsDTO.TreeDisplayCount.UNSPECIFIED) + ); } @Override Set processEvent(PropertyChangeEvent evt) { - // maps account type to 
the data sources affected - // GVDTODO this can probably be rewritten now that it isn't handling a list of autopsy events - Map> commAccountsAffected = new HashMap<>(); - try { - - String eventType = evt.getPropertyName(); - if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) { - ModuleDataEvent eventData = (ModuleDataEvent) evt.getOldValue(); - if (null != eventData - && eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID()) { - - // check that the update is for the same account type - for (BlackboardArtifact artifact : eventData.getArtifacts()) { - BlackboardAttribute typeAttr = artifact.getAttribute(BlackboardAttribute.Type.TSK_ACCOUNT_TYPE); - commAccountsAffected.computeIfAbsent(typeAttr.getValueString(), (k) -> new HashSet<>()) - .add(artifact.getDataSourceObjectID()); - } - } - } - } catch (TskCoreException ex) { - logger.log(Level.WARNING, "Unable to properly handle module data event.", ex); + // get a grouping of artifacts mapping the artifact type id to data source id. 
+ ModuleDataEvent dataEvt = DAOEventUtils.getModuelDataFromArtifactEvent(evt); + if (dataEvt == null) { + return Collections.emptySet(); } - // invalidate cache entries that are affected by events - ConcurrentMap, SearchResultsDTO> concurrentMap = this.searchParamsCache.asMap(); - concurrentMap.forEach((k, v) -> { - Object objectKey = k.getParamData(); - if (objectKey instanceof CommAccountsSearchParams) { - CommAccountsSearchParams commAcctKey = (CommAccountsSearchParams) objectKey; - Set dsIdsAffected = commAccountsAffected.get(commAcctKey.getType().getTypeName()); - if (dsIdsAffected != null - && (commAcctKey.getDataSourceId() == null - || dsIdsAffected.contains(commAcctKey.getDataSourceId()))) { + Map> accountTypeMap = new HashMap<>(); - concurrentMap.remove(k); + for (BlackboardArtifact art : dataEvt.getArtifacts()) { + try { + if (art.getType().getTypeID() == BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID()) { + BlackboardAttribute accountTypeAttribute = art.getAttribute(BlackboardAttribute.Type.TSK_ACCOUNT_TYPE); + if (accountTypeAttribute == null) { + continue; + } + + String accountTypeName = accountTypeAttribute.getValueString(); + if (accountTypeName == null) { + continue; + } + + accountTypeMap.computeIfAbsent(getCase().getCommunicationsManager().getAccountType(accountTypeName), (k) -> new HashSet<>()) + .add(art.getDataSourceObjectID()); } + } catch (NoCurrentCaseException | TskCoreException ex) { + logger.log(Level.WARNING, "Unable to fetch artifact category for artifact with id: " + art.getId(), ex); } - }); + } - return commAccountsAffected.entrySet().stream() - .flatMap(entry -> entry.getValue().stream().map(dsId -> new CommAccountsEvent(entry.getKey(), dsId))) + // don't do anything else if no relevant events + if (accountTypeMap.isEmpty()) { + return Collections.emptySet(); + } + + super.invalidateKeys(this.searchParamsCache, + (sp) -> Pair.of(sp.getType(), sp.getDataSourceId()), accountTypeMap); + + List accountEvents = new ArrayList<>(); + for 
(Map.Entry> entry : accountTypeMap.entrySet()) { + Account.Type accountType = entry.getKey(); + for (Long dsObjId : entry.getValue()) { + CommAccountsEvent newEvt = new CommAccountsEvent(accountType, dsObjId); + accountEvents.add(newEvt); + } + } + + Stream treeEvents = this.accountCounts.enqueueAll(accountEvents).stream() + .map(daoEvt -> new TreeEvent(createAccountTreeItem(daoEvt.getAccountType(), daoEvt.getDataSourceId(), TreeResultsDTO.TreeDisplayCount.INDETERMINATE), false)); + + return Stream.of(accountEvents.stream(), treeEvents) + .flatMap(s -> s) .collect(Collectors.toSet()); } @@ -225,10 +308,11 @@ public class CommAccountsDAO extends AbstractDAO { private boolean isCommAcctInvalidating(CommAccountsSearchParams parameters, DAOEvent evt) { if (evt instanceof CommAccountsEvent) { CommAccountsEvent commEvt = (CommAccountsEvent) evt; - return (parameters.getType().getTypeName().equals(commEvt.getAccountType())) - && (parameters.getDataSourceId() == null || parameters.getDataSourceId() == commEvt.getDataSourceId()); + return (parameters.getType().getTypeName().equals(commEvt.getType())) + && (parameters.getDataSourceId() == null || Objects.equals(parameters.getDataSourceId(), commEvt.getDataSourceId())); } else { return false; + } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsSearchParams.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsSearchParams.java index 91c5e272ae..8b187b15a7 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsSearchParams.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsSearchParams.java @@ -20,16 +20,18 @@ package org.sleuthkit.autopsy.mainui.datamodel; import java.util.Objects; import org.sleuthkit.datamodel.Account; +import org.sleuthkit.datamodel.BlackboardArtifact; /** * Key for accessing data about communication accounts from the DAO. 
*/ -public class CommAccountsSearchParams { +public class CommAccountsSearchParams extends DataArtifactSearchParam { private final Account.Type type; private final Long dataSourceId; public CommAccountsSearchParams(Account.Type type, Long dataSourceId) { + super(BlackboardArtifact.Type.TSK_ACCOUNT, dataSourceId); this.type = type; this.dataSourceId = dataSourceId; } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index 8980bbdbf1..0a89b16db3 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -24,7 +24,6 @@ import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventUtils; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import java.beans.PropertyChangeEvent; -import java.sql.SQLException; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Collections; @@ -34,9 +33,7 @@ import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; -import java.util.Objects; import java.util.Set; -import java.util.concurrent.ConcurrentMap; import java.util.stream.Collectors; import org.sleuthkit.autopsy.coreutils.Logger; import java.util.concurrent.ExecutionException; @@ -47,14 +44,11 @@ import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeDisplayCount; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; -import org.sleuthkit.autopsy.mainui.datamodel.events.AccountEvent; import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; import org.sleuthkit.autopsy.mainui.datamodel.events.TreeCounts; import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher; -import org.sleuthkit.datamodel.Account; import 
org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.DataArtifact; import org.sleuthkit.datamodel.SleuthkitCase; @@ -85,9 +79,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { } private final Cache, DataArtifactTableSearchResultsDTO> dataArtifactCache = CacheBuilder.newBuilder().maximumSize(1000).build(); - private final Cache, DataArtifactTableSearchResultsDTO> accountCache = CacheBuilder.newBuilder().maximumSize(1000).build(); private final TreeCounts treeCounts = new TreeCounts<>(); - private final TreeCounts accountCounts = new TreeCounts<>(); private DataArtifactTableSearchResultsDTO fetchDataArtifactsForTable(SearchParams cacheKey) throws NoCurrentCaseException, TskCoreException { @@ -129,74 +121,6 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { return dataArtifactCache.get(searchParams, () -> fetchDataArtifactsForTable(searchParams)); } - /** - * Fetch data artifacts with the given account type from the database. - * - * @param searchParams The search params for the account type to fetch. - * - * @return The results. 
- * - * @throws NoCurrentCaseException - * @throws TskCoreException - */ - private DataArtifactTableSearchResultsDTO fetchAccounts(SearchParams searchParams) throws NoCurrentCaseException, TskCoreException { - - // TODO improve performance - SleuthkitCase skCase = getCase(); - Blackboard blackboard = skCase.getBlackboard(); - - Long dataSourceId = searchParams.getParamData().getDataSourceId(); - BlackboardArtifact.Type artType = searchParams.getParamData().getArtifactType(); - - // We currently can't make a query on the set name field because need to use a prepared statement - String originalWhereClause = " artifacts.artifact_type_id = " + artType.getTypeID() + " "; - if (dataSourceId != null) { - originalWhereClause += " AND artifacts.data_source_obj_id = " + dataSourceId + " "; - } - - String expectedAccountTypeName = searchParams.getParamData().getAccountType().getTypeName(); - - List allAccounts = new ArrayList<>(); - allAccounts.addAll(blackboard.getDataArtifactsWhere(originalWhereClause)); - blackboard.loadBlackboardAttributes(allAccounts); - - // Filter for the selected set - List arts = new ArrayList<>(); - for (BlackboardArtifact art : allAccounts) { - BlackboardAttribute accountTypeAttr = art.getAttribute(BlackboardAttribute.Type.TSK_ACCOUNT_TYPE); - if ((expectedAccountTypeName == null && accountTypeAttr == null) - || (expectedAccountTypeName != null && accountTypeAttr != null && expectedAccountTypeName.equals(accountTypeAttr.getValueString()))) { - arts.add(art); - } - } - - List pagedArtifacts = getPaged(arts, searchParams); - TableData tableData = createTableData(artType, pagedArtifacts); - return new DataArtifactTableSearchResultsDTO(artType, tableData.columnKeys, tableData.rows, searchParams.getStartItem(), arts.size()); - } - - /** - * Gets the cached data or fetched data for the given account search params. - * - * @param searchParams The search params. - * @param startItem The starting item. 
- * @param maxCount The maximum count of items to return. - * - * @return The resulting data. - * - * @throws ExecutionException - * @throws IllegalArgumentException - */ - public DataArtifactTableSearchResultsDTO getAccountsForTable(AccountSearchParams searchParams, long startItem, Long maxCount) throws ExecutionException, IllegalArgumentException { - if (searchParams.getDataSourceId() != null && searchParams.getDataSourceId() < 0) { - throw new IllegalArgumentException(MessageFormat.format("Data source id must be null or > 0.", - searchParams.getDataSourceId() == null ? "" : searchParams.getDataSourceId())); - } - - SearchParams pagedSearchParams = new SearchParams<>(searchParams, startItem, maxCount); - return accountCache.get(pagedSearchParams, () -> fetchAccounts(pagedSearchParams)); - } - private boolean isDataArtifactInvalidating(DataArtifactSearchParam key, DAOEvent eventData) { if (!(eventData instanceof DataArtifactEvent)) { return false; @@ -207,16 +131,6 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { } } - private boolean isAccountInvalidating(AccountSearchParams parameters, DAOEvent evt) { - if (!(evt instanceof AccountEvent)) { - return false; - } else { - AccountEvent accountEvt = (AccountEvent) evt; - return Objects.equals(parameters.getAccountType(), evt.getType()) - && (parameters.getDataSourceId() == null || (parameters.getDataSourceId() == accountEvt.getDataSourceId())); - } - } - /** * Returns a search results dto containing rows of counts data. * @@ -256,60 +170,9 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { } } - /** - * Returns the accounts and their counts in the current data source if a - * data source id is provided or all accounts if data source id is null. - * - * @param dataSourceId The data source id or null for no data source filter. - * - * @return The results. 
- * - * @throws ExecutionException - */ - public TreeResultsDTO getAccountsCounts(Long dataSourceId) throws ExecutionException { - String query = "res.account_type AS account_type, MIN(res.account_display_name) AS account_display_name, COUNT(*) AS count\n" - + "FROM (\n" - + " SELECT MIN(account_types.type_name) AS account_type, MIN(account_types.display_name) AS account_display_name\n" - + " FROM blackboard_artifacts\n" - + " LEFT JOIN blackboard_attributes ON blackboard_artifacts.artifact_id = blackboard_attributes.artifact_id\n" - + " LEFT JOIN account_types ON blackboard_attributes.value_text = account_types.type_name\n" - + " WHERE blackboard_artifacts.artifact_type_id = " + BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID() + "\n" - + " AND blackboard_attributes.attribute_type_id = " + BlackboardAttribute.Type.TSK_ACCOUNT_TYPE.getTypeID() + "\n" - + (dataSourceId != null && dataSourceId > 0 ? " AND blackboard_artifacts.data_source_obj_id = " + dataSourceId + " " : " ") + "\n" - + " -- group by artifact_id to ensure only one account type per artifact\n" - + " GROUP BY blackboard_artifacts.artifact_id\n" - + ") res\n" - + "GROUP BY res.account_type\n" - + "ORDER BY MIN(res.account_display_name)"; - - List> accountParams = new ArrayList<>(); - try { - getCase().getCaseDbAccessManager().select(query, (resultSet) -> { - try { - while (resultSet.next()) { - String accountTypeName = resultSet.getString("account_type"); - String accountDisplayName = resultSet.getString("account_display_name"); - Account.Type accountType = new Account.Type(accountTypeName, accountDisplayName); - long count = resultSet.getLong("count"); - accountParams.add(createAccountTreeItem(accountType, dataSourceId, TreeDisplayCount.getDeterminate(count))); - } - } catch (SQLException ex) { - logger.log(Level.WARNING, "An error occurred while fetching artifact type counts.", ex); - } - }); - - // return results - return new TreeResultsDTO<>(accountParams); - - } catch (NoCurrentCaseException | 
TskCoreException ex) { - throw new ExecutionException("An error occurred while fetching data artifact counts.", ex); - } - } - @Override void clearCaches() { this.dataArtifactCache.invalidateAll(); - this.accountCache.invalidateAll(); this.handleIngestComplete(); } @@ -322,46 +185,28 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { } Map> artifactTypeDataSourceMap = new HashMap<>(); - Map> accountTypeMap = new HashMap<>(); for (BlackboardArtifact art : dataEvt.getArtifacts()) { try { - if (BlackboardArtifact.Category.DATA_ARTIFACT.equals(art.getType().getCategory())) { - if (art.getType().getTypeID() == BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID()) { - BlackboardAttribute accountTypeAttribute = art.getAttribute(BlackboardAttribute.Type.TSK_ACCOUNT_TYPE); - if (accountTypeAttribute == null) { - continue; - } + if (BlackboardArtifact.Category.DATA_ARTIFACT.equals(art.getType().getCategory()) + // accounts are handled in CommAccountsDAO + && art.getType().getTypeID() != BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID()) { - String accountTypeName = accountTypeAttribute.getValueString(); - if (accountTypeName == null) { - continue; - } - - accountTypeMap.computeIfAbsent(getCase().getCommunicationsManager().getAccountType(accountTypeName), (k) -> new HashSet<>()) - .add(art.getDataSourceObjectID()); - } else { - artifactTypeDataSourceMap.computeIfAbsent(art.getType(), (k) -> new HashSet<>()) - .add(art.getDataSourceObjectID()); - } + artifactTypeDataSourceMap.computeIfAbsent(art.getType(), (k) -> new HashSet<>()) + .add(art.getDataSourceObjectID()); } - } catch (NoCurrentCaseException | TskCoreException ex) { + } catch (TskCoreException ex) { logger.log(Level.WARNING, "Unable to fetch artifact category for artifact with id: " + art.getId(), ex); } } // don't do anything else if no relevant events - if (artifactTypeDataSourceMap.isEmpty() && accountTypeMap.isEmpty()) { + if (artifactTypeDataSourceMap.isEmpty()) { return Collections.emptySet(); } 
super.invalidateKeys(this.dataArtifactCache, (sp) -> Pair.of(sp.getArtifactType(), sp.getDataSourceId()), artifactTypeDataSourceMap); - super.invalidateKeys(this.accountCache, (sp) -> Pair.of(sp.getAccountType(), sp.getDataSourceId()), accountTypeMap); - - return getDAOEvts(accountTypeMap, artifactTypeDataSourceMap); - } - private Set getDAOEvts(Map> accountTypeDataSourceMap, Map> artifactTypeDataSourceMap) { // gather dao events based on artifacts List dataArtifactEvents = new ArrayList<>(); for (Entry> entry : artifactTypeDataSourceMap.entrySet()) { @@ -372,25 +217,11 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { } } - List dataArtifactTreeEvents = this.treeCounts.enqueueAll(dataArtifactEvents).stream() - .map(daoEvt -> new TreeEvent(createDataArtifactTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.INDETERMINATE), false)) - .collect(Collectors.toList()); + Stream dataArtifactTreeEvents = this.treeCounts.enqueueAll(dataArtifactEvents).stream() + .map(daoEvt -> new TreeEvent(createDataArtifactTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.INDETERMINATE), false)); - List accountEvents = new ArrayList<>(); - for (Entry> entry : accountTypeDataSourceMap.entrySet()) { - Account.Type accountType = entry.getKey(); - for (Long dsObjId : entry.getValue()) { - AccountEvent newEvt = new AccountEvent(accountType, dsObjId); - accountEvents.add(newEvt); - } - } - - List newAccountTreeEvents = this.accountCounts.enqueueAll(accountEvents).stream() - .map(daoEvt -> new TreeEvent(createAccountTreeItem(daoEvt.getAccountType(), daoEvt.getDataSourceId(), TreeDisplayCount.INDETERMINATE), false)) - .collect(Collectors.toList()); - - return Stream.of(dataArtifactEvents, dataArtifactTreeEvents, accountEvents, newAccountTreeEvents) - .flatMap((lst) -> lst.stream()) + return Stream.of(dataArtifactEvents.stream(), dataArtifactTreeEvents) + .flatMap(s -> s) .collect(Collectors.toSet()); } @@ -403,40 +234,16 @@ 
public class DataArtifactDAO extends BlackboardArtifactDAO { displayCount); } - private static TreeItemDTO createAccountTreeItem(Account.Type accountType, Long dataSourceId, TreeDisplayCount count) { - return new TreeItemDTO<>( - "ACCOUNTS", - new AccountSearchParams(accountType, dataSourceId), - accountType.getTypeName(), - accountType.getDisplayName(), - count); - } - @Override - Set handleIngestComplete() { - Set daoEvents = getIngestCompleteEvents(this.treeCounts, + Set handleIngestComplete() { + return getIngestCompleteEvents(this.treeCounts, (daoEvt) -> createDataArtifactTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.UNSPECIFIED)); - - Set accountDaoEvents = - getIngestCompleteEvents(this.accountCounts, - (daoEvt) -> createAccountTreeItem(daoEvt.getAccountType(), daoEvt.getDataSourceId(), TreeDisplayCount.UNSPECIFIED)); - - return Stream.of(daoEvents, accountDaoEvents) - .flatMap(s -> s.stream()) - .collect(Collectors.toSet()); } @Override Set shouldRefreshTree() { - Set daoEvents = getRefreshEvents(this.treeCounts, + return getRefreshEvents(this.treeCounts, (daoEvt) -> createDataArtifactTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.UNSPECIFIED)); - - Set accountDaoEvents = getRefreshEvents(this.accountCounts, - (daoEvt) -> createAccountTreeItem(daoEvt.getAccountType(), daoEvt.getDataSourceId(), TreeDisplayCount.UNSPECIFIED)); - - return Stream.of(daoEvents, accountDaoEvents) - .flatMap(s -> s.stream()) - .collect(Collectors.toSet()); } @@ -468,33 +275,4 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { return getDAO().isDataArtifactInvalidating(this.getParameters(), evt); } } - - /** - * Handles fetching and paging of account data artifacts. - */ - public static class DataArtifactAccountFetcher extends DAOFetcher { - - /** - * Main constructor. - * - * @param params Parameters to handle fetching of data. 
- */ - public DataArtifactAccountFetcher(AccountSearchParams params) { - super(params); - } - - protected DataArtifactDAO getDAO() { - return MainDAO.getInstance().getDataArtifactsDAO(); - } - - @Override - public SearchResultsDTO getSearchResults(int pageSize, int pageIdx) throws ExecutionException { - return getDAO().getAccountsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize); - } - - @Override - public boolean isRefreshRequired(DAOEvent evt) { - return getDAO().isAccountInvalidating(this.getParameters(), evt); - } - } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AccountEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AccountEvent.java deleted file mode 100644 index c9cec5071c..0000000000 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/AccountEvent.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2021 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.mainui.datamodel.events; - -import java.util.Objects; -import org.sleuthkit.datamodel.Account; -import org.sleuthkit.datamodel.BlackboardArtifact; - -/** - * Search parameters for accounts. - */ -public class AccountEvent extends DataArtifactEvent { - - private final Account.Type accountType; - - /** - * Main constructor. - * - * @param accountType The account type identifier. 
- * @param dataSourceId The data source id to filter on or null. - */ - public AccountEvent(Account.Type accountType, Long dataSourceId) { - super(BlackboardArtifact.Type.TSK_ACCOUNT, dataSourceId); - this.accountType = accountType; - } - - /** - * @return The account type identifier. - */ - public Account.Type getAccountType() { - return accountType; - } - - @Override - public int hashCode() { - int hash = 7; - hash = 29 * hash + Objects.hashCode(this.accountType); - hash = 29 * hash + super.hashCode(); - return hash; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - final AccountEvent other = (AccountEvent) obj; - if (!Objects.equals(this.accountType, other.accountType)) { - return false; - } - return super.equals(obj); - } - -} diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/CommAccountsEvent.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/CommAccountsEvent.java index c4c2978077..1da76f7f34 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/CommAccountsEvent.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/events/CommAccountsEvent.java @@ -19,33 +19,39 @@ package org.sleuthkit.autopsy.mainui.datamodel.events; import java.util.Objects; +import org.sleuthkit.datamodel.Account; +import org.sleuthkit.datamodel.BlackboardArtifact; /** * An event for handling */ -public class CommAccountsEvent implements DAOEvent { +public class CommAccountsEvent extends DataArtifactEvent { - private final String accountType; - private final Long dataSourceId; + private final Account.Type accountType; - public CommAccountsEvent(String accountType, Long dataSourceId) { + /** + * Main constructor. + * + * @param accountType The account type identifier. + * @param dataSourceId The data source id to filter on or null. 
+ */ + public CommAccountsEvent(Account.Type accountType, Long dataSourceId) { + super(BlackboardArtifact.Type.TSK_ACCOUNT, dataSourceId); this.accountType = accountType; - this.dataSourceId = dataSourceId; } - public String getAccountType() { + /** + * @return The account type identifier. + */ + public Account.Type getAccountType() { return accountType; } - public Long getDataSourceId() { - return dataSourceId; - } - @Override public int hashCode() { - int hash = 3; - hash = 17 * hash + Objects.hashCode(this.accountType); - hash = 17 * hash + Objects.hashCode(this.dataSourceId); + int hash = 7; + hash = 29 * hash + Objects.hashCode(this.accountType); + hash = 29 * hash + super.hashCode(); return hash; } @@ -64,15 +70,7 @@ public class CommAccountsEvent implements DAOEvent { if (!Objects.equals(this.accountType, other.accountType)) { return false; } - if (!Objects.equals(this.dataSourceId, other.dataSourceId)) { - return false; - } - return true; - } - - @Override - public Type getType() { - return Type.RESULT; + return super.equals(obj); } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java index fba0e9560e..56b56979a7 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java @@ -22,7 +22,7 @@ import java.util.Objects; import java.util.concurrent.ExecutionException; import org.openide.nodes.Children; import org.openide.util.NbBundle.Messages; -import org.sleuthkit.autopsy.mainui.datamodel.AccountSearchParams; +import org.sleuthkit.autopsy.mainui.datamodel.CommAccountsSearchParams; import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent; import org.sleuthkit.autopsy.datamodel.utils.IconsUtil; import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactDAO; @@ -171,7 +171,7 @@ public class DataArtifactTypeFactory extends TreeChildFactory { + 
static class AccountTypeFactory extends TreeChildFactory { private final Long dataSourceId; @@ -187,40 +187,40 @@ public class DataArtifactTypeFactory extends TreeChildFactory getChildResults() throws IllegalArgumentException, ExecutionException { - return MainDAO.getInstance().getDataArtifactsDAO().getAccountsCounts(this.dataSourceId); + protected TreeResultsDTO getChildResults() throws IllegalArgumentException, ExecutionException { + return MainDAO.getInstance().getCommAccountsDAO().getAccountsCounts(this.dataSourceId); } @Override - protected TreeNode createNewNode(TreeResultsDTO.TreeItemDTO rowData) { + protected TreeNode createNewNode(TreeResultsDTO.TreeItemDTO rowData) { return new AccountTypeNode(rowData); } @Override - protected TreeItemDTO getOrCreateRelevantChild(TreeEvent treeEvt) { + protected TreeItemDTO getOrCreateRelevantChild(TreeEvent treeEvt) { - TreeItemDTO originalTreeItem = getTypedTreeItem(treeEvt, AccountSearchParams.class); + TreeItemDTO originalTreeItem = getTypedTreeItem(treeEvt, CommAccountsSearchParams.class); if (originalTreeItem != null && (this.dataSourceId == null || Objects.equals(this.dataSourceId, originalTreeItem.getSearchParams().getDataSourceId()))) { - AccountSearchParams searchParam = originalTreeItem.getSearchParams(); + CommAccountsSearchParams searchParam = originalTreeItem.getSearchParams(); return TreeChildFactory.createTreeItemDTO(originalTreeItem, - new AccountSearchParams(searchParam.getAccountType(), searchParam.getDataSourceId())); + new CommAccountsSearchParams(searchParam.getType(), searchParam.getDataSourceId())); } return null; } @Override - public int compare(AccountSearchParams o1, AccountSearchParams o2) { - return o1.getAccountType().getDisplayName().compareToIgnoreCase(o2.getAccountType().getDisplayName()); + public int compare(CommAccountsSearchParams o1, CommAccountsSearchParams o2) { + return o1.getType().getDisplayName().compareToIgnoreCase(o2.getType().getDisplayName()); } } /** * A node representing a 
single account type in the tree. */ - static class AccountTypeNode extends TreeNode { + static class AccountTypeNode extends TreeNode { private static final String ICON_BASE_PATH = "org/sleuthkit/autopsy/images/"; //NON-NLS @@ -263,9 +263,9 @@ public class DataArtifactTypeFactory extends TreeChildFactory itemData) { - super(itemData.getSearchParams().getArtifactType().getTypeName(), - getAccountIconPath(itemData.getSearchParams().getAccountType()), + public AccountTypeNode(TreeResultsDTO.TreeItemDTO itemData) { + super(itemData.getSearchParams().getType().getTypeName(), + getAccountIconPath(itemData.getSearchParams().getType()), itemData); } From 4d964e627a8551dcb96a76aa794d90adf4712516 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 30 Nov 2021 16:12:16 -0500 Subject: [PATCH 123/142] 8202 address review comments --- .../autopsy/ingest/IngestJobExecutor.java | 38 ++++++++++--------- 1 file changed, 21 insertions(+), 17 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java index 9eeb220cc2..78f7238df4 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java @@ -541,7 +541,7 @@ final class IngestJobExecutor { } /** - * Determnines which ingets job stage to start in and starts up the ingest + * Determnines which ingest job stage to start in and starts up the ingest * module pipelines. * * @return A collection of ingest module startup errors, empty on success. @@ -676,14 +676,23 @@ final class IngestJobExecutor { if (hasFileIngestModules()) { /* - * Do a count of the files the data source processor has added - * to the case database. This number will be used to estimate - * how many files remain to be analyzed as each file ingest task - * is completed. + * Do an estimate of the total number of files to be analyzed. 
+ * This number will be used to estimate of how many files remain + * to be analyzed as each file ingest task is completed. The + * numbers are estimates because file analysis can add carved + * files and/or derived files. */ if (files.isEmpty()) { + /* + * Do a count of the files the data source processor has + * added to the case database. + */ estimatedFilesToProcess = dataSource.accept(new GetFilesCountVisitor()); } else { + /* + * Use the number of files in the specified subset of all of + * the files for the data source. + */ estimatedFilesToProcess = files.size(); } startFileIngestProgressBar(); @@ -794,11 +803,7 @@ final class IngestJobExecutor { /* * For ingest job progress reporting purposes, do a count of the * files the data source processor has added to the case - * database. This number will be used to estimate how many files - * remain to be analyzed as each file ingest task is completed. - * The estimate will likely be an over-estimate, since some of - * the files will have already been "streamed" to this job and - * processed. + * database. */ estimatedFilesToProcess = dataSource.accept(new GetFilesCountVisitor()); switchFileIngestProgressBarToDeterminate(); @@ -1204,6 +1209,7 @@ final class IngestJobExecutor { void addFiles(List files) { if (stage.equals(IngestJobStage.STREAMED_FILE_ANALYSIS_ONLY) || stage.equals(IngestJobStage.FILE_AND_HIGH_PRIORITY_DATA_SRC_LEVEL_ANALYSIS)) { + estimatedFilesToProcess += files.size(); taskScheduler.fastTrackFileIngestTasks(this, files); } else { logErrorMessage(Level.SEVERE, "Adding streaming files to job during stage " + stage.toString() + " not supported"); @@ -1362,19 +1368,17 @@ final class IngestJobExecutor { /** * Updates the current file ingest progress bar upon start of analysis of a - * file, if the job has not been cancelled, if the job has not been - * cancelled. + * file, if the job has not been cancelled. * * @param fileName The name of the file. 
*/ private void updateFileIngestProgressForFileTaskStarted(String fileName) { if (usingNetBeansGUI && !jobCancelled) { SwingUtilities.invokeLater(() -> { - if (processedFiles <= estimatedFilesToProcess) { - fileIngestProgressBar.progress(fileName, (int) processedFiles); - } else { - fileIngestProgressBar.progress(fileName, (int) estimatedFilesToProcess); - } + /* + * Note that if processedFiles exceeds estimatedFilesToProcess + */ + fileIngestProgressBar.progress(fileName, (int) processedFiles); filesInProgress.add(fileName); }); } From b5c2d74597054cf31d51ce557079c02529ee1c32 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Tue, 30 Nov 2021 16:19:36 -0500 Subject: [PATCH 124/142] fixes --- .../corecomponents/DataResultTopComponent.java | 4 ++-- .../autopsy/mainui/datamodel/CommAccountsDAO.java | 12 +++++++++++- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultTopComponent.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultTopComponent.java index 1f6d79aa37..1994b158cd 100644 --- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultTopComponent.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultTopComponent.java @@ -42,11 +42,11 @@ import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.FileTypeExtensionsSearchParams; import org.sleuthkit.autopsy.mainui.datamodel.MainDAO; import org.sleuthkit.autopsy.directorytree.ExternalViewerShortcutAction; -import org.sleuthkit.autopsy.mainui.datamodel.AccountSearchParams; import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.FileSystemContentSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.FileSystemHostSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSetSearchParam; +import org.sleuthkit.autopsy.mainui.datamodel.CommAccountsSearchParams; import 
org.sleuthkit.autopsy.mainui.datamodel.FileTypeMimeSearchParams; import org.sleuthkit.autopsy.mainui.datamodel.FileTypeSizeSearchParams; import org.sleuthkit.autopsy.mainui.datamodel.KeywordHitSearchParam; @@ -489,7 +489,7 @@ public final class DataResultTopComponent extends TopComponent implements DataRe * * @param accountParams The search parameter query. */ - public void displayAccounts(AccountSearchParams accountParams) { + public void displayAccounts(CommAccountsSearchParams accountParams) { dataResultPanel.displayAccounts(accountParams); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java index dec8ec6f35..2b3e3e3dcb 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java @@ -45,6 +45,7 @@ import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import static org.sleuthkit.autopsy.mainui.datamodel.AbstractDAO.getIngestCompleteEvents; import static org.sleuthkit.autopsy.mainui.datamodel.AbstractDAO.getRefreshEvents; +import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeDisplayCount; import org.sleuthkit.autopsy.mainui.datamodel.events.CommAccountsEvent; import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEventUtils; @@ -196,6 +197,11 @@ public class CommAccountsDAO extends AbstractDAO { List> accountParams = new ArrayList<>(); try { + Set indeterminateTypes = this.accountCounts.getEnqueued().stream() + .filter(evt -> dataSourceId == null || evt.getDataSourceId() == dataSourceId) + .map(evt -> evt.getAccountType()) + .collect(Collectors.toSet()); + getCase().getCaseDbAccessManager().select(query, (resultSet) -> { try { while (resultSet.next()) { @@ -203,7 +209,11 @@ public class CommAccountsDAO extends AbstractDAO { String accountDisplayName 
= resultSet.getString("account_display_name"); Account.Type accountType = new Account.Type(accountTypeName, accountDisplayName); long count = resultSet.getLong("count"); - accountParams.add(createAccountTreeItem(accountType, dataSourceId, TreeResultsDTO.TreeDisplayCount.getDeterminate(count))); + TreeDisplayCount treeDisplayCount = indeterminateTypes.contains(accountType) + ? TreeDisplayCount.INDETERMINATE + : TreeResultsDTO.TreeDisplayCount.getDeterminate(count); + + accountParams.add(createAccountTreeItem(accountType, dataSourceId, treeDisplayCount)); } } catch (SQLException ex) { logger.log(Level.WARNING, "An error occurred while fetching artifact type counts.", ex); From 1aa1c80b53e4096938efd354b4ef50ab97e563d3 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 30 Nov 2021 16:20:58 -0500 Subject: [PATCH 125/142] 8202 KWS cancellation out of EDT, progress ops in EDT --- .../sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java index 1174e6fc05..bb2fbe189a 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java @@ -492,9 +492,9 @@ final class IngestSearchRunner { * interface), to better decouple keyword search from the * application's presentation layer. */ - final String displayName = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.displayName") - + (finalRun ? (" - " + NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.finalizeMsg")) : ""); SwingUtilities.invokeAndWait(() -> { + final String displayName = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.displayName") + + (finalRun ? 
(" - " + NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.finalizeMsg")) : ""); progressIndicator = ProgressHandle.createHandle(displayName, new Cancellable() { @Override public boolean cancel() { From e7c3eb14bb2a17ac123866fefb4d9ef26835499d Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 30 Nov 2021 16:30:55 -0500 Subject: [PATCH 126/142] 8202 KWS cancellation out of EDT, progress ops in EDT --- .../src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java index d58f9c1a5c..313bad3e60 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java @@ -333,7 +333,7 @@ class QueryResults { } detailsSb.append(""); //NON-NLS - String key = uniqueKey; // RC: Might be null, does this work? + final String key = uniqueKey; // Might be null, but that's supported. 
SwingUtilities.invokeLater(() -> { IngestServices.getInstance().postMessage(IngestMessage.createDataMessage(MODULE_NAME, subjectSb.toString(), detailsSb.toString(), key, artifact)); }); From f0eed25f2178836231a0730af8c9ef816c5b5768 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 30 Nov 2021 17:06:19 -0500 Subject: [PATCH 127/142] 8202 KWS cancellation out of EDT, progress ops in EDT --- Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java | 1 - 1 file changed, 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java index 78f7238df4..fd331dfab7 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java @@ -1209,7 +1209,6 @@ final class IngestJobExecutor { void addFiles(List files) { if (stage.equals(IngestJobStage.STREAMED_FILE_ANALYSIS_ONLY) || stage.equals(IngestJobStage.FILE_AND_HIGH_PRIORITY_DATA_SRC_LEVEL_ANALYSIS)) { - estimatedFilesToProcess += files.size(); taskScheduler.fastTrackFileIngestTasks(this, files); } else { logErrorMessage(Level.SEVERE, "Adding streaming files to job during stage " + stage.toString() + " not supported"); From 1804f4cb1c9f4c1085d5c39ea4c8ad0cf7515b2b Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 30 Nov 2021 17:54:37 -0500 Subject: [PATCH 128/142] 8202 fix progress bar IllegalArgumentException --- .../sleuthkit/autopsy/ingest/IngestJobExecutor.java | 10 +++++++++- .../org/netbeans/core/startup/Bundle.properties | 2 +- .../netbeans/core/windows/view/ui/Bundle.properties | 2 +- 3 files changed, 11 insertions(+), 3 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java index fd331dfab7..31df9b39f6 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java 
@@ -1375,8 +1375,16 @@ final class IngestJobExecutor { if (usingNetBeansGUI && !jobCancelled) { SwingUtilities.invokeLater(() -> { /* - * Note that if processedFiles exceeds estimatedFilesToProcess + * Note that if processedFiles exceeds estimatedFilesToProcess, + * the progress bar will go into an infinte loop throwing + * IllegalArgumentExceptions in the EDT. */ + long processedFilesCapture = processedFiles; + if (processedFiles <= estimatedFilesToProcess) { + fileIngestProgressBar.progress(fileName, (int) processedFilesCapture); + } else { + fileIngestProgressBar.progress(fileName, (int) estimatedFilesToProcess); + } fileIngestProgressBar.progress(fileName, (int) processedFiles); filesInProgress.add(fileName); }); diff --git a/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties b/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties index cd253dc3cb..e9620c3d7a 100644 --- a/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties +++ b/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties @@ -1,5 +1,5 @@ #Updated by build script -#Thu, 30 Sep 2021 19:36:31 -0400 +#Tue, 30 Nov 2021 17:19:50 -0500 LBL_splash_window_title=Starting Autopsy SPLASH_HEIGHT=314 SPLASH_WIDTH=538 diff --git a/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties b/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties index 2d02262803..f591caf623 100644 --- a/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties +++ b/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties @@ -1,4 +1,4 @@ #Updated by build script -#Thu, 30 Sep 2021 19:36:31 -0400 +#Tue, 30 Nov 2021 17:19:50 -0500 CTL_MainWindow_Title=Autopsy 4.19.2 CTL_MainWindow_Title_No_Project=Autopsy 4.19.2 From 73e4f647b1f5a2dda8fe2a99daa1d5cd86d694f8 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 
30 Nov 2021 17:57:02 -0500 Subject: [PATCH 129/142] 8202 fix progress bar IllegalArgumentException --- Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java index 31df9b39f6..83c705acb1 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java @@ -1376,7 +1376,8 @@ final class IngestJobExecutor { SwingUtilities.invokeLater(() -> { /* * Note that if processedFiles exceeds estimatedFilesToProcess, - * the progress bar will go into an infinte loop throwing + * i.e., max work units fopr the progress bar, the progress bar + * will go into an infinte loop throwing * IllegalArgumentExceptions in the EDT. */ long processedFilesCapture = processedFiles; From f7e1886a71ca6ec313aa7dea846208b0867d1cb0 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 30 Nov 2021 17:57:34 -0500 Subject: [PATCH 130/142] 8202 fix progress bar IllegalArgumentException --- Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java index 83c705acb1..f397dd55d9 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java @@ -1376,7 +1376,7 @@ final class IngestJobExecutor { SwingUtilities.invokeLater(() -> { /* * Note that if processedFiles exceeds estimatedFilesToProcess, - * i.e., max work units fopr the progress bar, the progress bar + * i.e., max work units for the progress bar, the progress bar * will go into an infinte loop throwing * IllegalArgumentExceptions in the EDT. 
*/ From b28b777ff49a6eec9cb45928a967f2ac1046a4ab Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 30 Nov 2021 17:58:48 -0500 Subject: [PATCH 131/142] 8202 fix progress bar IllegalArgumentException --- Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java index f397dd55d9..f49d6ecb97 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java @@ -1376,8 +1376,8 @@ final class IngestJobExecutor { SwingUtilities.invokeLater(() -> { /* * Note that if processedFiles exceeds estimatedFilesToProcess, - * i.e., max work units for the progress bar, the progress bar - * will go into an infinte loop throwing + * i.e., the max work units set for the progress bar, the + * progress bar will go into an infinite loop throwing * IllegalArgumentExceptions in the EDT. */ long processedFilesCapture = processedFiles; From 2b9b608bcf19893eb9e5899e688ce28637b47cbc Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 30 Nov 2021 17:59:40 -0500 Subject: [PATCH 132/142] 8202 fix progress bar IllegalArgumentException --- Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java index f49d6ecb97..660df7326b 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java @@ -1381,7 +1381,7 @@ final class IngestJobExecutor { * IllegalArgumentExceptions in the EDT. 
*/ long processedFilesCapture = processedFiles; - if (processedFiles <= estimatedFilesToProcess) { + if (processedFilesCapture <= estimatedFilesToProcess) { fileIngestProgressBar.progress(fileName, (int) processedFilesCapture); } else { fileIngestProgressBar.progress(fileName, (int) estimatedFilesToProcess); From 220a0a7cbb7cc464f30bcdb44d4a59e880ad9ee3 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 30 Nov 2021 18:03:48 -0500 Subject: [PATCH 133/142] 8202 fix progress bar IllegalArgumentException --- .../sleuthkit/autopsy/ingest/IngestJobExecutor.java | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java index 660df7326b..1bfb817cde 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java @@ -1375,10 +1375,15 @@ final class IngestJobExecutor { if (usingNetBeansGUI && !jobCancelled) { SwingUtilities.invokeLater(() -> { /* - * Note that if processedFiles exceeds estimatedFilesToProcess, - * i.e., the max work units set for the progress bar, the - * progress bar will go into an infinite loop throwing - * IllegalArgumentExceptions in the EDT. + * If processedFiles exceeds estimatedFilesToProcess, i.e., the + * max work units set for the progress bar, the progress bar + * will go into an infinite loop throwing + * IllegalArgumentExceptions in the EDT (NetBeans bug). Also, a + * check-then-act race condition needs to be avoided here. This + * can be done without guarding processedFiles and + * estimatedFilesToProcess with the same lock because + * estimatedFilesToProcess does not change after it is used to + * switch the progress bar to determinate mode. 
*/ long processedFilesCapture = processedFiles; if (processedFilesCapture <= estimatedFilesToProcess) { From b591883ff9a0d4725d5f5b12fac1e393e88f98a2 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 30 Nov 2021 18:53:28 -0500 Subject: [PATCH 134/142] 8202 fix progress bar IllegalArgumentException --- Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java | 1 - 1 file changed, 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java index 1bfb817cde..47b4d9b601 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java @@ -1391,7 +1391,6 @@ final class IngestJobExecutor { } else { fileIngestProgressBar.progress(fileName, (int) estimatedFilesToProcess); } - fileIngestProgressBar.progress(fileName, (int) processedFiles); filesInProgress.add(fileName); }); } From a771caf3b77bfc1e71fc85116812e97dcf26d9e4 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Tue, 30 Nov 2021 19:10:36 -0500 Subject: [PATCH 135/142] fixes --- .../sleuthkit/autopsy/datamodel/Bundle.properties-MERGED | 8 ++++---- .../autopsy/mainui/datamodel/DataArtifactDAO.java | 7 ++++--- .../sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java | 3 ++- 3 files changed, 10 insertions(+), 8 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/datamodel/Bundle.properties-MERGED index 40a7fc2c77..076ff00d47 100755 --- a/Core/src/org/sleuthkit/autopsy/datamodel/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/datamodel/Bundle.properties-MERGED @@ -300,10 +300,10 @@ ImageNode.getActions.viewInNewWin.text=View in New Window ImageNode.createSheet.name.name=Name ImageNode.createSheet.name.displayName=Name ImageNode.createSheet.name.desc=no description -Installer.exception.tskVerStringNull.msg=Sleuth Kit JNI test call returned without error, 
but version string was null! -Installer.exception.taskVerStringBang.msg=Sleuth Kit JNI test call returned without error, but version string was ""! -Installer.tskLibErr.msg=Problem with Sleuth Kit JNI. Test call failed!\n\nDetails: {0} -Installer.tskLibErr.err=Fatal Error! +Installer.exception.tskVerStringNull.msg=Sleuth Kit JNI test call returned without error, but version string was null\! +Installer.exception.taskVerStringBang.msg=Sleuth Kit JNI test call returned without error, but version string was ""\! +Installer.tskLibErr.msg=Problem with Sleuth Kit JNI. Test call failed\!\n\nDetails: {0} +Installer.tskLibErr.err=Fatal Error\! InterestingHits.interestingItems.text=INTERESTING ITEMS InterestingHits.displayName.text=Interesting Items InterestingHits.createSheet.name.name=Name diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index 0a89b16db3..076f430992 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -154,10 +154,11 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { Map typeCounts = getCounts(BlackboardArtifact.Category.DATA_ARTIFACT, dataSourceId); List> treeItemRows = typeCounts.entrySet().stream() .map(entry -> { - return createDataArtifactTreeItem(entry.getKey(), dataSourceId, - indeterminateTypes.contains(entry.getKey()) + TreeDisplayCount displayCount = indeterminateTypes.contains(entry.getKey()) ? 
TreeDisplayCount.INDETERMINATE - : TreeDisplayCount.getDeterminate(entry.getValue())); + : TreeDisplayCount.getDeterminate(entry.getValue()); + + return createDataArtifactTreeItem(entry.getKey(), dataSourceId, displayCount); }) .sorted(Comparator.comparing(countRow -> countRow.getDisplayName())) .collect(Collectors.toList()); diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java index 96e5232600..74baf01e6f 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeChildFactory.java @@ -96,7 +96,8 @@ public abstract class TreeChildFactory extends ChildFactory.Detachable(curItemsList); } // update existing cached nodes From 7410994ec79a3ec2131ca312339bf268d4b4f7b7 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Wed, 1 Dec 2021 12:26:10 -0500 Subject: [PATCH 136/142] fix --- .../mainui/datamodel/Bundle.properties-MERGED | 1 + .../mainui/datamodel/DataArtifactDAO.java | 21 +++++- .../mainui/nodes/DataArtifactTypeFactory.java | 64 +++++++++---------- 3 files changed, 49 insertions(+), 37 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/Bundle.properties-MERGED index 2411b60cd8..9bd4e34ab9 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/Bundle.properties-MERGED +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/Bundle.properties-MERGED @@ -36,6 +36,7 @@ BlackboardArtifactDAO.columnKeys.srcFile.displayName=Source Name BlackboardArtifactDAO.columnKeys.srcFile.name=Source Name CommAccounts.name.text=Communication Accounts CommAccountsDAO.fileColumns.noDescription=No Description +DataArtifactDAO_Accounts_displayName=Communication Accounts FileExtDocumentFilter_html_displayName=HTML FileExtDocumentFilter_office_displayName=Office FileExtDocumentFilter_pdf_displayName=PDF diff --git 
a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index 076f430992..e01d4e242d 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -40,6 +40,7 @@ import java.util.concurrent.ExecutionException; import java.util.logging.Level; import java.util.stream.Stream; import org.apache.commons.lang3.tuple.Pair; +import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeDisplayCount; @@ -57,6 +58,9 @@ import org.sleuthkit.datamodel.TskCoreException; /** * DAO for providing data about data artifacts to populate the results viewer. */ +@NbBundle.Messages({ + "DataArtifactDAO_Accounts_displayName=Communication Accounts" +}) public class DataArtifactDAO extends BlackboardArtifactDAO { private static Logger logger = Logger.getLogger(DataArtifactDAO.class.getName()); @@ -157,7 +161,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { TreeDisplayCount displayCount = indeterminateTypes.contains(entry.getKey()) ? TreeDisplayCount.INDETERMINATE : TreeDisplayCount.getDeterminate(entry.getValue()); - + return createDataArtifactTreeItem(entry.getKey(), dataSourceId, displayCount); }) .sorted(Comparator.comparing(countRow -> countRow.getDisplayName())) @@ -226,12 +230,25 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { .collect(Collectors.toSet()); } + /** + * Returns the display name for an artifact type. + * + * @param artifactType The artifact type. + * + * @return The display name. + */ + public String getDisplayName(BlackboardArtifact.Type artifactType) { + return artifactType.getTypeID() == BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID() + ? 
Bundle.DataArtifactDAO_Accounts_displayName() + : artifactType.getDisplayName(); + } + private TreeItemDTO createDataArtifactTreeItem(BlackboardArtifact.Type artifactType, Long dataSourceId, TreeDisplayCount displayCount) { return new TreeResultsDTO.TreeItemDTO<>( BlackboardArtifact.Category.DATA_ARTIFACT.name(), new DataArtifactSearchParam(artifactType, dataSourceId), artifactType.getTypeID(), - artifactType.getDisplayName(), + getDisplayName(artifactType), displayCount); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java index 56b56979a7..acace1f106 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java @@ -39,7 +39,7 @@ import org.sleuthkit.datamodel.BlackboardArtifact; * Factory for displaying data artifact types in the tree. */ public class DataArtifactTypeFactory extends TreeChildFactory { - + private final Long dataSourceId; /** @@ -51,11 +51,12 @@ public class DataArtifactTypeFactory extends TreeChildFactory getChildResults() throws IllegalArgumentException, ExecutionException { return MainDAO.getInstance().getDataArtifactsDAO().getDataArtifactCounts(dataSourceId); } - + @Override protected TreeNode createNewNode(TreeResultsDTO.TreeItemDTO rowData) { if (rowData.getSearchParams().getArtifactType().getTypeID() == BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID()) { @@ -64,40 +65,33 @@ public class DataArtifactTypeFactory extends TreeChildFactory getOrCreateRelevantChild(TreeEvent treeEvt) { - + TreeItemDTO originalTreeItem = super.getTypedTreeItem(treeEvt, DataArtifactSearchParam.class); - + if (originalTreeItem != null && !DataArtifactDAO.getIgnoredTreeTypes().contains(originalTreeItem.getSearchParams().getArtifactType()) && (this.dataSourceId == null || Objects.equals(this.dataSourceId, 
originalTreeItem.getSearchParams().getDataSourceId()))) { - + DataArtifactSearchParam searchParam = originalTreeItem.getSearchParams(); return new TreeItemDTO<>( BlackboardArtifact.Category.DATA_ARTIFACT.name(), - new DataArtifactSearchParam(searchParam.getArtifactType(), searchParam.getDataSourceId()), + new DataArtifactSearchParam(searchParam.getArtifactType(), this.dataSourceId), searchParam.getArtifactType().getTypeID(), - searchParam.getArtifactType().getDisplayName(), + MainDAO.getInstance().getDataArtifactsDAO().getDisplayName(searchParam.getArtifactType()), originalTreeItem.getDisplayCount()); } return null; } - + @Override public int compare(DataArtifactSearchParam o1, DataArtifactSearchParam o2) { - String displayName1 = o1.getArtifactType().getTypeID() == BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID() - ? Bundle.DataArtifactTypeFactory_AccountTypeParentNode_displayName() - : o1.getArtifactType().getDisplayName(); - - String displayName2 = o2.getArtifactType().getTypeID() == BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID() - ? Bundle.DataArtifactTypeFactory_AccountTypeParentNode_displayName() - : o2.getArtifactType().getDisplayName(); - - return displayName1.compareToIgnoreCase(displayName2); + DataArtifactDAO dao = MainDAO.getInstance().getDataArtifactsDAO(); + return dao.getDisplayName(o1.getArtifactType()).compareToIgnoreCase(dao.getDisplayName(o2.getArtifactType())); } - + private static String getIconPath(BlackboardArtifact.Type artType) { String iconPath = IconsUtil.getIconFilePath(artType.getTypeID()); return iconPath != null && iconPath.charAt(0) == '/' ? 
iconPath.substring(1) : iconPath; @@ -107,13 +101,13 @@ public class DataArtifactTypeFactory extends TreeChildFactory { - + public DataArtifactTypeTreeNode(TreeResultsDTO.TreeItemDTO itemData) { super(itemData.getSearchParams().getArtifactType().getTypeName(), getIconPath(itemData.getSearchParams().getArtifactType()), itemData); } - + @Override public void respondSelection(DataResultTopComponent dataResultPanel) { dataResultPanel.displayDataArtifact(this.getItemData().getSearchParams()); @@ -160,19 +154,19 @@ public class DataArtifactTypeFactory extends TreeChildFactory prevData, TreeItemDTO curData) { super.updateDisplayName(prevData, createTitledData(curData)); } - + } /** * Factory for displaying account types. */ static class AccountTypeFactory extends TreeChildFactory { - + private final Long dataSourceId; /** @@ -185,32 +179,32 @@ public class DataArtifactTypeFactory extends TreeChildFactory getChildResults() throws IllegalArgumentException, ExecutionException { return MainDAO.getInstance().getCommAccountsDAO().getAccountsCounts(this.dataSourceId); } - + @Override protected TreeNode createNewNode(TreeResultsDTO.TreeItemDTO rowData) { return new AccountTypeNode(rowData); } - + @Override protected TreeItemDTO getOrCreateRelevantChild(TreeEvent treeEvt) { - + TreeItemDTO originalTreeItem = getTypedTreeItem(treeEvt, CommAccountsSearchParams.class); - + if (originalTreeItem != null && (this.dataSourceId == null || Objects.equals(this.dataSourceId, originalTreeItem.getSearchParams().getDataSourceId()))) { CommAccountsSearchParams searchParam = originalTreeItem.getSearchParams(); return TreeChildFactory.createTreeItemDTO(originalTreeItem, - new CommAccountsSearchParams(searchParam.getType(), searchParam.getDataSourceId())); + new CommAccountsSearchParams(searchParam.getType(), this.dataSourceId)); } - + return null; } - + @Override public int compare(CommAccountsSearchParams o1, CommAccountsSearchParams o2) { return 
o1.getType().getDisplayName().compareToIgnoreCase(o2.getType().getDisplayName()); @@ -221,7 +215,7 @@ public class DataArtifactTypeFactory extends TreeChildFactory { - + private static final String ICON_BASE_PATH = "org/sleuthkit/autopsy/images/"; //NON-NLS /** @@ -230,7 +224,7 @@ public class DataArtifactTypeFactory extends TreeChildFactory Date: Thu, 2 Dec 2021 11:26:25 -0500 Subject: [PATCH 137/142] test fixes --- .../mainui/datamodel/TableSearchTest.java | 118 +++++++++--------- 1 file changed, 59 insertions(+), 59 deletions(-) diff --git a/Core/test/qa-functional/src/org/sleuthkit/autopsy/mainui/datamodel/TableSearchTest.java b/Core/test/qa-functional/src/org/sleuthkit/autopsy/mainui/datamodel/TableSearchTest.java index b239386e44..676fdc462c 100644 --- a/Core/test/qa-functional/src/org/sleuthkit/autopsy/mainui/datamodel/TableSearchTest.java +++ b/Core/test/qa-functional/src/org/sleuthkit/autopsy/mainui/datamodel/TableSearchTest.java @@ -537,28 +537,28 @@ public class TableSearchTest extends NbTestCase { DataArtifactSearchParam param = new DataArtifactSearchParam(BlackboardArtifact.Type.TSK_CONTACT, null); DataArtifactDAO dataArtifactDAO = MainDAO.getInstance().getDataArtifactsDAO(); - DataArtifactTableSearchResultsDTO results = dataArtifactDAO.getDataArtifactsForTable(param, 0, null, false); + DataArtifactTableSearchResultsDTO results = dataArtifactDAO.getDataArtifactsForTable(param, 0, null); assertEquals(BlackboardArtifact.Type.TSK_CONTACT, results.getArtifactType()); assertEquals(2, results.getTotalResultsCount()); assertEquals(2, results.getItems().size()); // Get contacts from data source 2 param = new DataArtifactSearchParam(BlackboardArtifact.Type.TSK_CONTACT, dataSource2.getId()); - results = dataArtifactDAO.getDataArtifactsForTable(param, 0, null, false); + results = dataArtifactDAO.getDataArtifactsForTable(param, 0, null); assertEquals(BlackboardArtifact.Type.TSK_CONTACT, results.getArtifactType()); assertEquals(1, results.getTotalResultsCount()); 
assertEquals(1, results.getItems().size()); // Get bookmarks from data source 2 param = new DataArtifactSearchParam(BlackboardArtifact.Type.TSK_WEB_BOOKMARK, dataSource2.getId()); - results = dataArtifactDAO.getDataArtifactsForTable(param, 0, null, false); + results = dataArtifactDAO.getDataArtifactsForTable(param, 0, null); assertEquals(BlackboardArtifact.Type.TSK_WEB_BOOKMARK, results.getArtifactType()); assertEquals(0, results.getTotalResultsCount()); assertEquals(0, results.getItems().size()); // Get all custom artifacts param = new DataArtifactSearchParam(customDataArtifactType, null); - results = dataArtifactDAO.getDataArtifactsForTable(param, 0, null, false); + results = dataArtifactDAO.getDataArtifactsForTable(param, 0, null); assertEquals(customDataArtifactType, results.getArtifactType()); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); @@ -595,7 +595,7 @@ public class TableSearchTest extends NbTestCase { // Get the first page param = new DataArtifactSearchParam(BlackboardArtifact.Type.TSK_WEB_BOOKMARK, null); - results = dataArtifactDAO.getDataArtifactsForTable(param, 0, pageSize, false); + results = dataArtifactDAO.getDataArtifactsForTable(param, 0, pageSize); assertEquals(ARTIFACT_COUNT_WEB_BOOKMARK, results.getTotalResultsCount()); assertEquals(pageSize.longValue(), results.getItems().size()); @@ -611,7 +611,7 @@ public class TableSearchTest extends NbTestCase { // Get the second page param = new DataArtifactSearchParam(BlackboardArtifact.Type.TSK_WEB_BOOKMARK, null); - results = dataArtifactDAO.getDataArtifactsForTable(param, pageSize, pageSize, false); + results = dataArtifactDAO.getDataArtifactsForTable(param, pageSize, pageSize); assertEquals(ARTIFACT_COUNT_WEB_BOOKMARK, results.getTotalResultsCount()); assertEquals(ARTIFACT_COUNT_WEB_BOOKMARK - pageSize, results.getItems().size()); @@ -639,19 +639,19 @@ public class TableSearchTest extends NbTestCase { // Get emails from all data sources 
CommAccountsSearchParams param = new CommAccountsSearchParams(Account.Type.EMAIL, null); - SearchResultsDTO results = commAccountsDAO.getCommAcounts(param, 0, null, false); + SearchResultsDTO results = commAccountsDAO.getCommAcounts(param, 0, null); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); // Get device accounts from data source 1 param = new CommAccountsSearchParams(Account.Type.DEVICE, dataSource1.getId()); - results = commAccountsDAO.getCommAcounts(param, 0, null, false); + results = commAccountsDAO.getCommAcounts(param, 0, null); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); // Get email accounts from data source 2 param = new CommAccountsSearchParams(Account.Type.EMAIL, dataSource2.getId()); - results = commAccountsDAO.getCommAcounts(param, 0, null, false); + results = commAccountsDAO.getCommAcounts(param, 0, null); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); @@ -671,13 +671,13 @@ public class TableSearchTest extends NbTestCase { // Get phone accounts from all data sources param = new CommAccountsSearchParams(Account.Type.PHONE, null); - results = commAccountsDAO.getCommAcounts(param, 0, null, false); + results = commAccountsDAO.getCommAcounts(param, 0, null); assertEquals(2, results.getTotalResultsCount()); assertEquals(2, results.getItems().size()); // Get phone accounts from data source 2 param = new CommAccountsSearchParams(Account.Type.PHONE, dataSource2.getId()); - results = commAccountsDAO.getCommAcounts(param, 0, null, false); + results = commAccountsDAO.getCommAcounts(param, 0, null); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); @@ -705,37 +705,37 @@ public class TableSearchTest extends NbTestCase { // Get plain text files from data source 1 FileTypeMimeSearchParams param = new FileTypeMimeSearchParams("text/plain", dataSource1.getId()); - SearchResultsDTO 
results = viewsDAO.getFilesByMime(param, 0, null, false); + SearchResultsDTO results = viewsDAO.getFilesByMime(param, 0, null); assertEquals(2, results.getTotalResultsCount()); assertEquals(2, results.getItems().size()); // Get jpeg files from data source 1 param = new FileTypeMimeSearchParams("image/jpeg", dataSource1.getId()); - results = viewsDAO.getFilesByMime(param, 0, null, false); + results = viewsDAO.getFilesByMime(param, 0, null); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); // Get jpeg files from data source 2 param = new FileTypeMimeSearchParams("image/jpeg", dataSource2.getId()); - results = viewsDAO.getFilesByMime(param, 0, null, false); + results = viewsDAO.getFilesByMime(param, 0, null); assertEquals(0, results.getTotalResultsCount()); assertEquals(0, results.getItems().size()); // Search for mime type that should produce no results param = new FileTypeMimeSearchParams("blah/blah", null); - results = viewsDAO.getFilesByMime(param, 0, null, false); + results = viewsDAO.getFilesByMime(param, 0, null); assertEquals(0, results.getTotalResultsCount()); assertEquals(0, results.getItems().size()); // Get plain text files from all data sources param = new FileTypeMimeSearchParams("text/plain", null); - results = viewsDAO.getFilesByMime(param, 0, null, false); + results = viewsDAO.getFilesByMime(param, 0, null); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); // Get the custom file by MIME type param = new FileTypeMimeSearchParams(CUSTOM_MIME_TYPE, null); - results = viewsDAO.getFilesByMime(param, 0, null, false); + results = viewsDAO.getFilesByMime(param, 0, null); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); @@ -760,31 +760,31 @@ public class TableSearchTest extends NbTestCase { // Get "50 - 200MB" files from data source 1 FileTypeSizeSearchParams param = new FileTypeSizeSearchParams(FileSizeFilter.SIZE_50_200, 
dataSource1.getId()); - SearchResultsDTO results = viewsDAO.getFilesBySize(param, 0, null, false); + SearchResultsDTO results = viewsDAO.getFilesBySize(param, 0, null); assertEquals(2, results.getTotalResultsCount()); assertEquals(2, results.getItems().size()); // Get "200MB - 1GB" files from data source 1 param = new FileTypeSizeSearchParams(FileSizeFilter.SIZE_200_1000, dataSource1.getId()); - results = viewsDAO.getFilesBySize(param, 0, null, false); + results = viewsDAO.getFilesBySize(param, 0, null); assertEquals(0, results.getTotalResultsCount()); assertEquals(0, results.getItems().size()); // Get "200MB - 1GB" files from data source 2 param = new FileTypeSizeSearchParams(FileSizeFilter.SIZE_200_1000, dataSource2.getId()); - results = viewsDAO.getFilesBySize(param, 0, null, false); + results = viewsDAO.getFilesBySize(param, 0, null); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); // Get "1GB+" files from all data sources param = new FileTypeSizeSearchParams(FileSizeFilter.SIZE_1000_, null); - results = viewsDAO.getFilesBySize(param, 0, null, false); + results = viewsDAO.getFilesBySize(param, 0, null); assertEquals(0, results.getTotalResultsCount()); assertEquals(0, results.getItems().size()); // Get "50 - 200MB" files from all data sources param = new FileTypeSizeSearchParams(FileSizeFilter.SIZE_50_200, null); - results = viewsDAO.getFilesBySize(param, 0, null, false); + results = viewsDAO.getFilesBySize(param, 0, null); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); } catch (ExecutionException ex) { @@ -802,25 +802,25 @@ public class TableSearchTest extends NbTestCase { // Get "Tag1" file tags from data source 1 TagsSearchParams param = new TagsSearchParams(knownTag1, TagsSearchParams.TagType.FILE, dataSource1.getId()); - SearchResultsDTO results = tagsDAO.getTags(param, 0, null, false); + SearchResultsDTO results = tagsDAO.getTags(param, 0, null); assertEquals(0, 
results.getTotalResultsCount()); assertEquals(0, results.getItems().size()); // Get "Tag2" file tags from data source 1 param = new TagsSearchParams(tag2, TagsSearchParams.TagType.FILE, dataSource1.getId()); - results = tagsDAO.getTags(param, 0, null, false); + results = tagsDAO.getTags(param, 0, null); assertEquals(2, results.getTotalResultsCount()); assertEquals(2, results.getItems().size()); // Get "Tag2" file tags from all data sources param = new TagsSearchParams(tag2, TagsSearchParams.TagType.FILE, null); - results = tagsDAO.getTags(param, 0, null, false); + results = tagsDAO.getTags(param, 0, null); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); // Get "Tag1" file tags from data source 2 param = new TagsSearchParams(knownTag1, TagsSearchParams.TagType.FILE, dataSource2.getId()); - results = tagsDAO.getTags(param, 0, null, false); + results = tagsDAO.getTags(param, 0, null); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); @@ -844,19 +844,19 @@ public class TableSearchTest extends NbTestCase { // Get "Tag1" result tags from data source 2 param = new TagsSearchParams(knownTag1, TagsSearchParams.TagType.RESULT, dataSource2.getId()); - results = tagsDAO.getTags(param, 0, null, false); + results = tagsDAO.getTags(param, 0, null); assertEquals(0, results.getTotalResultsCount()); assertEquals(0, results.getItems().size()); // Get "Tag2" result tags from data source 1 param = new TagsSearchParams(tag2, TagsSearchParams.TagType.RESULT, dataSource1.getId()); - results = tagsDAO.getTags(param, 0, null, false); + results = tagsDAO.getTags(param, 0, null); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); // Get "Tag1" result tags from data source 1 param = new TagsSearchParams(knownTag1, TagsSearchParams.TagType.RESULT, dataSource1.getId()); - results = tagsDAO.getTags(param, 0, null, false); + results = tagsDAO.getTags(param, 0, null); 
assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); @@ -883,19 +883,19 @@ public class TableSearchTest extends NbTestCase { // Get OS Accounts from data source 1 OsAccountsSearchParams param = new OsAccountsSearchParams(dataSource1.getId()); - SearchResultsDTO results = accountsDAO.getAccounts(param, 0, null, false); + SearchResultsDTO results = accountsDAO.getAccounts(param, 0, null); assertEquals(2, results.getTotalResultsCount()); assertEquals(2, results.getItems().size()); // Get OS Accounts from all data sources param = new OsAccountsSearchParams(null); - results = accountsDAO.getAccounts(param, 0, null, false); + results = accountsDAO.getAccounts(param, 0, null); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); // Get OS Accounts from data source 1 param = new OsAccountsSearchParams(dataSource2.getId()); - results = accountsDAO.getAccounts(param, 0, null, false); + results = accountsDAO.getAccounts(param, 0, null); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); @@ -928,21 +928,21 @@ public class TableSearchTest extends NbTestCase { AnalysisResultSearchParam param = new AnalysisResultSearchParam(BlackboardArtifact.Type.TSK_ENCRYPTION_DETECTED, null); AnalysisResultDAO analysisResultDAO = MainDAO.getInstance().getAnalysisResultDAO(); - AnalysisResultTableSearchResultsDTO results = analysisResultDAO.getAnalysisResultsForTable(param, 0, null, false); + AnalysisResultTableSearchResultsDTO results = analysisResultDAO.getAnalysisResultsForTable(param, 0, null); assertEquals(BlackboardArtifact.Type.TSK_ENCRYPTION_DETECTED, results.getArtifactType()); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); // Get encryption detected artifacts from data source 2 param = new AnalysisResultSearchParam(BlackboardArtifact.Type.TSK_ENCRYPTION_DETECTED, dataSource2.getId()); - results = 
analysisResultDAO.getAnalysisResultsForTable(param, 0, null, false); + results = analysisResultDAO.getAnalysisResultsForTable(param, 0, null); assertEquals(BlackboardArtifact.Type.TSK_ENCRYPTION_DETECTED, results.getArtifactType()); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); // Get all custom artifacts param = new AnalysisResultSearchParam(customAnalysisResultType, null); - results = analysisResultDAO.getAnalysisResultsForTable(param, 0, null, false); + results = analysisResultDAO.getAnalysisResultsForTable(param, 0, null); assertEquals(customAnalysisResultType, results.getArtifactType()); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); @@ -977,7 +977,7 @@ public class TableSearchTest extends NbTestCase { // Get the first page param = new AnalysisResultSearchParam(BlackboardArtifact.Type.TSK_YARA_HIT, null); - results = analysisResultDAO.getAnalysisResultsForTable(param, 0, pageSize, false); + results = analysisResultDAO.getAnalysisResultsForTable(param, 0, pageSize); assertEquals(ARTIFACT_COUNT_YARA, results.getTotalResultsCount()); assertEquals(pageSize.longValue(), results.getItems().size()); @@ -993,7 +993,7 @@ public class TableSearchTest extends NbTestCase { // Get the second page param = new AnalysisResultSearchParam(BlackboardArtifact.Type.TSK_YARA_HIT, null); - results = analysisResultDAO.getAnalysisResultsForTable(param, pageSize, pageSize, false); + results = analysisResultDAO.getAnalysisResultsForTable(param, pageSize, pageSize); assertEquals(ARTIFACT_COUNT_YARA, results.getTotalResultsCount()); assertEquals(ARTIFACT_COUNT_YARA - pageSize, results.getItems().size()); @@ -1017,13 +1017,13 @@ public class TableSearchTest extends NbTestCase { // Test hash set hits AnalysisResultDAO analysisResultDAO = MainDAO.getInstance().getAnalysisResultDAO(); HashHitSearchParam hashParam = new HashHitSearchParam(null, HASH_SET_1); - AnalysisResultTableSearchResultsDTO results 
= analysisResultDAO.getAnalysisResultSetHits(hashParam, 0, null, false); + AnalysisResultTableSearchResultsDTO results = analysisResultDAO.getAnalysisResultSetHits(hashParam, 0, null); assertEquals(BlackboardArtifact.Type.TSK_HASHSET_HIT, results.getArtifactType()); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); hashParam = new HashHitSearchParam(dataSource2.getId(), HASH_SET_1); - results = analysisResultDAO.getAnalysisResultSetHits(hashParam, 0, null, false); + results = analysisResultDAO.getAnalysisResultSetHits(hashParam, 0, null); assertEquals(BlackboardArtifact.Type.TSK_HASHSET_HIT, results.getArtifactType()); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); @@ -1056,13 +1056,13 @@ public class TableSearchTest extends NbTestCase { // Test keyword set hits AnalysisResultDAO analysisResultDAO = MainDAO.getInstance().getAnalysisResultDAO(); KeywordHitSearchParam kwParam = new KeywordHitSearchParam(null, KEYWORD_SET_1, "", ""); - AnalysisResultTableSearchResultsDTO results = analysisResultDAO.getKeywordHitsForTable(kwParam, 0, null, false); + AnalysisResultTableSearchResultsDTO results = analysisResultDAO.getKeywordHitsForTable(kwParam, 0, null); assertEquals(BlackboardArtifact.Type.TSK_KEYWORD_HIT, results.getArtifactType()); assertEquals(2, results.getTotalResultsCount()); assertEquals(2, results.getItems().size()); kwParam = new KeywordHitSearchParam(dataSource2.getId(), KEYWORD_SET_1, "", ""); - results = analysisResultDAO.getKeywordHitsForTable(kwParam, 0, null, false); + results = analysisResultDAO.getKeywordHitsForTable(kwParam, 0, null); assertEquals(BlackboardArtifact.Type.TSK_KEYWORD_HIT, results.getArtifactType()); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); @@ -1100,43 +1100,43 @@ public class TableSearchTest extends NbTestCase { // Get all text documents from data source 1 FileTypeExtensionsSearchParams param = 
new FileTypeExtensionsSearchParams(FileExtRootFilter.TSK_DOCUMENT_FILTER, dataSource1.getId()); - SearchResultsDTO results = viewsDAO.getFilesByExtension(param, 0, null, false); + SearchResultsDTO results = viewsDAO.getFilesByExtension(param, 0, null); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); // Get Word documents from data source 1 param = new FileTypeExtensionsSearchParams(FileExtDocumentFilter.AUT_DOC_OFFICE, dataSource1.getId()); - results = viewsDAO.getFilesByExtension(param, 0, null, false); + results = viewsDAO.getFilesByExtension(param, 0, null); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); // Get image/jpeg files from data source 1 param = new FileTypeExtensionsSearchParams(FileExtRootFilter.TSK_IMAGE_FILTER, dataSource1.getId()); - results = viewsDAO.getFilesByExtension(param, 0, null, false); + results = viewsDAO.getFilesByExtension(param, 0, null); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); // Get text documents from all data sources param = new FileTypeExtensionsSearchParams(FileExtRootFilter.TSK_DOCUMENT_FILTER, null); - results = viewsDAO.getFilesByExtension(param, 0, null, false); + results = viewsDAO.getFilesByExtension(param, 0, null); assertEquals(4, results.getTotalResultsCount()); assertEquals(4, results.getItems().size()); // Get jpeg files from data source 2 param = new FileTypeExtensionsSearchParams(FileExtRootFilter.TSK_IMAGE_FILTER, dataSource2.getId()); - results = viewsDAO.getFilesByExtension(param, 0, null, false); + results = viewsDAO.getFilesByExtension(param, 0, null); assertEquals(0, results.getTotalResultsCount()); assertEquals(0, results.getItems().size()); // Search for file extensions that should produce no results param = new FileTypeExtensionsSearchParams(CustomRootFilter.EMPTY_RESULT_SET_FILTER, null); - results = viewsDAO.getFilesByExtension(param, 0, null, false); + results = 
viewsDAO.getFilesByExtension(param, 0, null); assertEquals(0, results.getTotalResultsCount()); assertEquals(0, results.getItems().size()); // Get the custom file by extension param = new FileTypeExtensionsSearchParams(CustomRootFilter.CUSTOM_FILTER, null); - results = viewsDAO.getFilesByExtension(param, 0, null, false); + results = viewsDAO.getFilesByExtension(param, 0, null); assertEquals(1, results.getTotalResultsCount()); assertEquals(1, results.getItems().size()); @@ -1161,13 +1161,13 @@ public class TableSearchTest extends NbTestCase { // There are 4 hosts not associated with a person FileSystemPersonSearchParam personParam = new FileSystemPersonSearchParam(null); - BaseSearchResultsDTO results = fileSystemDAO.getHostsForTable(personParam, 0, null, false); + BaseSearchResultsDTO results = fileSystemDAO.getHostsForTable(personParam, 0, null); assertEquals(4, results.getTotalResultsCount()); assertEquals(4, results.getItems().size()); // Person1 is associated with two hosts personParam = new FileSystemPersonSearchParam(person1.getPersonId()); - results = fileSystemDAO.getHostsForTable(personParam, 0, null, false); + results = fileSystemDAO.getHostsForTable(personParam, 0, null); assertEquals(2, results.getTotalResultsCount()); assertEquals(2, results.getItems().size()); @@ -1177,43 +1177,43 @@ public class TableSearchTest extends NbTestCase { // HostA is associated with two images FileSystemHostSearchParam hostParam = new FileSystemHostSearchParam(fsTestHostA.getHostId()); - results = fileSystemDAO.getContentForTable(hostParam, 0, null, false); + results = fileSystemDAO.getContentForTable(hostParam, 0, null); assertEquals(2, results.getTotalResultsCount()); assertEquals(2, results.getItems().size()); // ImageA has one volume system child, which has three volumes that will be displayed FileSystemContentSearchParam param = new FileSystemContentSearchParam(fsTestImageA.getId()); - results = fileSystemDAO.getContentForTable(param, 0, null, false); + results = 
fileSystemDAO.getContentForTable(param, 0, null); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); // VsA has three volume children (this should match the previous search) param = new FileSystemContentSearchParam(fsTestVsA.getId()); - results = fileSystemDAO.getContentForTable(param, 0, null, false); + results = fileSystemDAO.getContentForTable(param, 0, null); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); // VolumeA1 has a file system child, which in turn has a root directory child with three file children param = new FileSystemContentSearchParam(fsTestVolumeA1.getId()); - results = fileSystemDAO.getContentForTable(param, 0, null, false); + results = fileSystemDAO.getContentForTable(param, 0, null); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); // FsA has a root directory child with three file children (this should match the previous search) param = new FileSystemContentSearchParam(fsTestFsA.getId()); - results = fileSystemDAO.getContentForTable(param, 0, null, false); + results = fileSystemDAO.getContentForTable(param, 0, null); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); // The root dir contains three files param = new FileSystemContentSearchParam(fsTestRootDirA.getId()); - results = fileSystemDAO.getContentForTable(param, 0, null, false); + results = fileSystemDAO.getContentForTable(param, 0, null); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); // ImageB has VS (which will display one volume), pool, and one local file children param = new FileSystemContentSearchParam(fsTestImageB.getId()); - results = fileSystemDAO.getContentForTable(param, 0, null, false); + results = fileSystemDAO.getContentForTable(param, 0, null); assertEquals(3, results.getTotalResultsCount()); assertEquals(3, results.getItems().size()); @@ -1224,13 +1224,13 
@@ public class TableSearchTest extends NbTestCase { // fsTestVolumeB1 has pool and one local file children param = new FileSystemContentSearchParam(fsTestVolumeB1.getId()); - results = fileSystemDAO.getContentForTable(param, 0, null, false); + results = fileSystemDAO.getContentForTable(param, 0, null); assertEquals(2, results.getTotalResultsCount()); assertEquals(2, results.getItems().size()); // fsTestPoolB has VS (which will display one volume) and local file children param = new FileSystemContentSearchParam(fsTestPoolB.getId()); - results = fileSystemDAO.getContentForTable(param, 0, null, false); + results = fileSystemDAO.getContentForTable(param, 0, null); assertEquals(2, results.getTotalResultsCount()); assertEquals(2, results.getItems().size()); From 79fa09a3a0f67b45d9027f95aebc64fa8cd2bc63 Mon Sep 17 00:00:00 2001 From: apriestman Date: Thu, 2 Dec 2021 12:29:56 -0500 Subject: [PATCH 138/142] Use unique names for content nodes --- .../autopsy/mainui/nodes/DirectoryNode.java | 1 + .../autopsy/mainui/nodes/FileNode.java | 2 +- .../mainui/nodes/FileSystemFactory.java | 33 ++++++------------- .../autopsy/mainui/nodes/ImageNode.java | 1 + .../autopsy/mainui/nodes/OsAccountNode.java | 2 +- .../autopsy/mainui/nodes/PoolNode.java | 1 + .../mainui/nodes/SpecialDirectoryNode.java | 1 + .../autopsy/mainui/nodes/VolumeNode.java | 1 + 8 files changed, 17 insertions(+), 25 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DirectoryNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DirectoryNode.java index 58f917790d..ddea64e9d3 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DirectoryNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DirectoryNode.java @@ -41,6 +41,7 @@ public class DirectoryNode extends BaseNode { super(Children.LEAF, ContentNodeUtil.getLookup(row.getContent()), results, row); setDisplayName(row.getContent().getName()); setShortDescription(row.getContent().getName()); + 
setName(ContentNodeUtil.getContentName(row.getContent().getId())); setIcon(); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileNode.java index 19adb923f1..790b75d485 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileNode.java @@ -59,8 +59,8 @@ public class FileNode extends AbstractNode implements ActionContext { // GVDTODO: at some point, this leaf will need to allow for children super(Children.LEAF, ContentNodeUtil.getLookup(file.getAbstractFile())); setIcon(file); - setDisplayName(ContentNodeUtil.getContentDisplayName(file.getFileName())); setName(ContentNodeUtil.getContentName(file.getId())); + setDisplayName(ContentNodeUtil.getContentDisplayName(file.getFileName())); setShortDescription(ContentNodeUtil.getContentDisplayName(file.getFileName())); this.directoryBrowseMode = directoryBrowseMode; this.fileData = file; diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileSystemFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileSystemFactory.java index 1d37b9a889..1223132df5 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileSystemFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/FileSystemFactory.java @@ -18,7 +18,6 @@ */ package org.sleuthkit.autopsy.mainui.nodes; -import java.beans.PropertyChangeEvent; import java.util.Optional; import org.openide.nodes.Children; import org.openide.nodes.Node; @@ -27,7 +26,6 @@ import java.util.logging.Level; import javax.swing.Action; import org.openide.util.Lookup; import org.openide.util.NbBundle; -import org.python.google.common.primitives.Longs; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent; @@ -35,8 +33,6 @@ import org.sleuthkit.autopsy.coreutils.Logger; import 
org.sleuthkit.autopsy.datamodel.FileTypeExtensions; import org.sleuthkit.autopsy.directorytree.ExtractUnallocAction; import org.sleuthkit.autopsy.directorytree.FileSystemDetailsAction; -import org.sleuthkit.autopsy.ingest.IngestManager; -import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.autopsy.mainui.datamodel.FileSystemContentSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.FileSystemColumnUtils; import org.sleuthkit.autopsy.mainui.datamodel.MediaTypeUtils; @@ -217,8 +213,8 @@ public class FileSystemFactory extends TreeChildFactory implements ActionContext { - protected FileSystemTreeNode(String nodeName, String icon, TreeResultsDTO.TreeItemDTO itemData, Children children, Lookup lookup) { - super(nodeName, icon, itemData, children, lookup); + protected FileSystemTreeNode(String icon, TreeResultsDTO.TreeItemDTO itemData, Children children, Lookup lookup) { + super(ContentNodeUtil.getContentName(itemData.getSearchParams().getContentObjectId()), icon, itemData, children, lookup); } protected static Children createChildrenForContent(Long contentId) { @@ -254,8 +250,7 @@ public class FileSystemFactory extends TreeChildFactory itemData) { - super(itemData.getDisplayName(), - NodeIconUtil.IMAGE.getPath(), + super(NodeIconUtil.IMAGE.getPath(), itemData, createChildrenForContent(itemData.getSearchParams().getContentObjectId()), ContentNodeUtil.getLookup(image)); @@ -290,8 +285,7 @@ public class FileSystemFactory extends TreeChildFactory itemData) { - super(FileSystemColumnUtils.getVolumeDisplayName(volume), - NodeIconUtil.VOLUME.getPath(), + super(NodeIconUtil.VOLUME.getPath(), itemData, createChildrenForContent(itemData.getSearchParams().getContentObjectId()), ContentNodeUtil.getLookup(volume)); @@ -322,8 +316,7 @@ public class FileSystemFactory extends TreeChildFactory itemData) { - super(itemData.getDisplayName(), - NodeIconUtil.VOLUME.getPath(), + super(NodeIconUtil.VOLUME.getPath(), itemData, 
createChildrenForContent(itemData.getSearchParams().getContentObjectId()), ContentNodeUtil.getLookup(pool)); @@ -340,8 +333,7 @@ public class FileSystemFactory extends TreeChildFactory itemData) { - super(itemData.getDisplayName(), - getDirectoryIcon(dir), + super(getDirectoryIcon(dir), itemData, createChildrenForContent(itemData.getSearchParams().getContentObjectId()), ContentNodeUtil.getLookup(dir)); @@ -390,8 +382,8 @@ public class FileSystemFactory extends TreeChildFactory itemData, Children children, Lookup lookup) { - super(nodeName, icon, itemData, children, lookup); + protected SpecialDirectoryTreeNode(AbstractFile dir, String icon, TreeResultsDTO.TreeItemDTO itemData, Children children, Lookup lookup) { + super(icon, itemData, children, lookup); this.dir = dir; } @@ -415,7 +407,6 @@ public class FileSystemFactory extends TreeChildFactory itemData) { super(dir, - itemData.getDisplayName(), NodeIconUtil.FOLDER.getPath(), itemData, createChildrenForContent(itemData.getSearchParams().getContentObjectId()), @@ -436,7 +427,6 @@ public class FileSystemFactory extends TreeChildFactory itemData) { super(localFilesDataSource, - itemData.getDisplayName(), NodeIconUtil.VOLUME.getPath(), itemData, createChildrenForContent(itemData.getSearchParams().getContentObjectId()), @@ -457,7 +447,6 @@ public class FileSystemFactory extends TreeChildFactory itemData) { super(dir, - itemData.getDisplayName(), NodeIconUtil.VIRTUAL_DIRECTORY.getPath(), itemData, createChildrenForContent(itemData.getSearchParams().getContentObjectId()), @@ -474,8 +463,7 @@ public class FileSystemFactory extends TreeChildFactory itemData) { - super(itemData.getDisplayName(), - getFileIcon(file), + super(getFileIcon(file), itemData, createChildrenForContent(itemData.getSearchParams().getContentObjectId()), ContentNodeUtil.getLookup(file)); @@ -561,8 +549,7 @@ public class FileSystemFactory extends TreeChildFactory itemData) { - super(Bundle.FileSystemFactory_UnsupportedTreeNode_displayName(), - 
NodeIconUtil.FILE.getPath(), + super(NodeIconUtil.FILE.getPath(), itemData, createChildrenForContent(itemData.getSearchParams().getContentObjectId()), getDefaultLookup(itemData)); diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/ImageNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/ImageNode.java index ad506522c9..5b9c900689 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/ImageNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/ImageNode.java @@ -41,6 +41,7 @@ public class ImageNode extends BaseNode { */ public ImageNode(SearchResultsDTO results, ImageRowDTO row) { super(Children.LEAF, ContentNodeUtil.getLookup(row.getContent()), results, row); + setName(ContentNodeUtil.getContentName(row.getContent().getId())); setDisplayName(row.getContent().getName()); setShortDescription(row.getContent().getName()); setIconBaseWithExtension(NodeIconUtil.IMAGE.getPath()); //NON-NLS diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/OsAccountNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/OsAccountNode.java index 92dc258f82..667d062251 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/OsAccountNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/OsAccountNode.java @@ -39,7 +39,7 @@ public class OsAccountNode extends BaseNode{ results, rowData); String name = rowData.getContent().getName(); - setName(name); + setName(ContentNodeUtil.getContentName(rowData.getContent().getId())); setDisplayName(name); setShortDescription(name); setIconBaseWithExtension(ICON_PATH); diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/PoolNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/PoolNode.java index 53c0e9ccf1..198a13a124 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/PoolNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/PoolNode.java @@ -41,6 +41,7 @@ public class PoolNode extends BaseNode { results, row); String name = row.getContent().getType().getName(); + 
setName(ContentNodeUtil.getContentName(row.getContent().getId())); setDisplayName(name); setShortDescription(name); setIconBaseWithExtension(NodeIconUtil.POOL.getPath()); diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/SpecialDirectoryNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/SpecialDirectoryNode.java index 203da8b4f2..0ad6a615a8 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/SpecialDirectoryNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/SpecialDirectoryNode.java @@ -43,6 +43,7 @@ abstract class SpecialDirectoryNode extends BaseNode row) { super(Children.LEAF, ContentNodeUtil.getLookup(row.getContent()), results, row); + setName(ContentNodeUtil.getContentName(row.getContent().getId())); setDisplayName(row.getContent().getName()); setShortDescription(row.getContent().getName()); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/VolumeNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/VolumeNode.java index 4971b1700a..99d600bf6c 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/VolumeNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/VolumeNode.java @@ -49,6 +49,7 @@ public class VolumeNode extends BaseNode { ? 
row.getCellValues().get(0).toString() : ""; + setName(ContentNodeUtil.getContentName(row.getContent().getId())); setDisplayName(displayName); setShortDescription(displayName); } From d51db0366c335f81a54e972a6070e86bc096bcef Mon Sep 17 00:00:00 2001 From: apriestman Date: Thu, 2 Dec 2021 12:32:24 -0500 Subject: [PATCH 139/142] Fix inconsistent ordering --- Core/src/org/sleuthkit/autopsy/mainui/nodes/DirectoryNode.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DirectoryNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DirectoryNode.java index ddea64e9d3..8b101d9244 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DirectoryNode.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DirectoryNode.java @@ -39,9 +39,9 @@ public class DirectoryNode extends BaseNode { */ public DirectoryNode(SearchResultsDTO results, DirectoryRowDTO row) { super(Children.LEAF, ContentNodeUtil.getLookup(row.getContent()), results, row); + setName(ContentNodeUtil.getContentName(row.getContent().getId())); setDisplayName(row.getContent().getName()); setShortDescription(row.getContent().getName()); - setName(ContentNodeUtil.getContentName(row.getContent().getId())); setIcon(); } From b7b70dabe761224bc3353ebbee401cca74fbbe5a Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Fri, 3 Dec 2021 16:25:24 -0500 Subject: [PATCH 140/142] move utility code to new class --- .../autopsy/mainui/datamodel/AbstractDAO.java | 65 ----------- .../mainui/datamodel/CommAccountsDAO.java | 8 +- .../mainui/datamodel/DataArtifactDAO.java | 6 +- .../autopsy/mainui/datamodel/SubDAOUtils.java | 110 ++++++++++++++++++ 4 files changed, 116 insertions(+), 73 deletions(-) create mode 100644 Core/src/org/sleuthkit/autopsy/mainui/datamodel/SubDAOUtils.java diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java index 9172770c47..19ffc70bf5 100644 --- 
a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java @@ -73,69 +73,4 @@ abstract class AbstractDAO { * @return The categories that require a tree refresh. */ abstract Set shouldRefreshTree(); - - /** - * Using a digest of event information, clears keys in a cache that may be - * effected by events. - * - * @param cache The cache. - * @param getKeys Using a key from a cache, provides a tuple - * of the relevant key in the data source - * mapping and the data source id (or null if - * no data source filtering). - * @param itemDataSourceMapping The event digest. - */ - static void invalidateKeys(Cache, ?> cache, Function> getKeys, Map> itemDataSourceMapping) { - invalidateKeys(cache, getKeys, Collections.singletonList(itemDataSourceMapping)); - } - - /** - * Using a digest of event information, clears keys in a cache that may be - * effected by events. - * - * @param cache The cache. - * @param getKeys Using a key from a cache, provides a tuple - * of the relevant key in the data source - * mapping and the data source id (or null if - * no data source filtering). - * @param itemDataSourceMapping The list of event digests. - */ - static void invalidateKeys(Cache, ?> cache, Function> getKeys, List>> itemDataSourceMapping) { - ConcurrentMap, ?> concurrentMap = cache.asMap(); - concurrentMap.forEach((k, v) -> { - Pair pairItems = getKeys.apply(k.getParamData()); - T searchParamsKey = pairItems.getLeft(); - Long searchParamsDsId = pairItems.getRight(); - for (Map> itemDsMapping : itemDataSourceMapping) { - Set dsIds = itemDsMapping.get(searchParamsKey); - if (dsIds != null && (searchParamsDsId == null || dsIds.contains(searchParamsDsId))) { - concurrentMap.remove(k); - } - } - }); - } - - /** - * Returns a set of tree events gathered from the TreeCounts instance after calling flushEvents. - * @param treeCounts The tree counts instance. 
- * @param converter The means of acquiring a tree item dto to be placed in the TreeEvent. - * @return The generated tree events. - */ - static Set getIngestCompleteEvents(TreeCounts treeCounts, Function> converter) { - return treeCounts.flushEvents().stream() - .map(daoEvt -> new TreeEvent(converter.apply(daoEvt), true)) - .collect(Collectors.toSet()); - } - - /** - * Returns a set of tree events gathered from the TreeCounts instance after calling getEventTimeouts. - * @param treeCounts The tree counts instance. - * @param converter The means of acquiring a tree item dto to be placed in the TreeEvent. - * @return The generated tree events. - */ - static Set getRefreshEvents(TreeCounts treeCounts, Function> converter) { - return treeCounts.getEventTimeouts().stream() - .map(daoEvt -> new TreeEvent(converter.apply(daoEvt), true)) - .collect(Collectors.toSet()); - } } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java index 2b3e3e3dcb..186817ab24 100755 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/CommAccountsDAO.java @@ -43,8 +43,6 @@ import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; -import static org.sleuthkit.autopsy.mainui.datamodel.AbstractDAO.getIngestCompleteEvents; -import static org.sleuthkit.autopsy.mainui.datamodel.AbstractDAO.getRefreshEvents; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeDisplayCount; import org.sleuthkit.autopsy.mainui.datamodel.events.CommAccountsEvent; import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; @@ -236,7 +234,7 @@ public class CommAccountsDAO extends AbstractDAO { @Override Set handleIngestComplete() { - return getIngestCompleteEvents( + return 
SubDAOUtils.getIngestCompleteEvents( this.accountCounts, (daoEvt) -> createAccountTreeItem(daoEvt.getAccountType(), daoEvt.getDataSourceId(), TreeResultsDTO.TreeDisplayCount.UNSPECIFIED) ); @@ -244,7 +242,7 @@ public class CommAccountsDAO extends AbstractDAO { @Override Set shouldRefreshTree() { - return getRefreshEvents( + return SubDAOUtils.getRefreshEvents( this.accountCounts, (daoEvt) -> createAccountTreeItem(daoEvt.getAccountType(), daoEvt.getDataSourceId(), TreeResultsDTO.TreeDisplayCount.UNSPECIFIED) ); @@ -286,7 +284,7 @@ public class CommAccountsDAO extends AbstractDAO { return Collections.emptySet(); } - super.invalidateKeys(this.searchParamsCache, + SubDAOUtils.invalidateKeys(this.searchParamsCache, (sp) -> Pair.of(sp.getType(), sp.getDataSourceId()), accountTypeMap); List accountEvents = new ArrayList<>(); diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java index e01d4e242d..4728c27010 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/DataArtifactDAO.java @@ -210,7 +210,7 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { return Collections.emptySet(); } - super.invalidateKeys(this.dataArtifactCache, (sp) -> Pair.of(sp.getArtifactType(), sp.getDataSourceId()), artifactTypeDataSourceMap); + SubDAOUtils.invalidateKeys(this.dataArtifactCache, (sp) -> Pair.of(sp.getArtifactType(), sp.getDataSourceId()), artifactTypeDataSourceMap); // gather dao events based on artifacts List dataArtifactEvents = new ArrayList<>(); @@ -254,13 +254,13 @@ public class DataArtifactDAO extends BlackboardArtifactDAO { @Override Set handleIngestComplete() { - return getIngestCompleteEvents(this.treeCounts, + return SubDAOUtils.getIngestCompleteEvents(this.treeCounts, (daoEvt) -> createDataArtifactTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), 
TreeDisplayCount.UNSPECIFIED)); } @Override Set shouldRefreshTree() { - return getRefreshEvents(this.treeCounts, + return SubDAOUtils.getRefreshEvents(this.treeCounts, (daoEvt) -> createDataArtifactTreeItem(daoEvt.getArtifactType(), daoEvt.getDataSourceId(), TreeDisplayCount.UNSPECIFIED)); } diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/SubDAOUtils.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/SubDAOUtils.java new file mode 100644 index 0000000000..53e2f7cfb3 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/SubDAOUtils.java @@ -0,0 +1,110 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2021 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.mainui.datamodel; + +import com.google.common.cache.Cache; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentMap; +import java.util.function.Function; +import java.util.stream.Collectors; +import org.apache.commons.lang3.tuple.Pair; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeCounts; +import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; + +/** + * Utilities for common actions in the sub DAOs. + */ +public class SubDAOUtils { + + /** + * Using a digest of event information, clears keys in a cache that may be + * effected by events. + * + * @param cache The cache. 
+ * @param getKeys Using a key from a cache, provides a tuple + * of the relevant key in the data source + * mapping and the data source id (or null if + * no data source filtering). + * @param itemDataSourceMapping The event digest. + */ + static void invalidateKeys(Cache, ?> cache, Function> getKeys, Map> itemDataSourceMapping) { + invalidateKeys(cache, getKeys, Collections.singletonList(itemDataSourceMapping)); + } + + /** + * Using a digest of event information, clears keys in a cache that may be + * effected by events. + * + * @param cache The cache. + * @param getKeys Using a key from a cache, provides a tuple + * of the relevant key in the data source + * mapping and the data source id (or null if + * no data source filtering). + * @param itemDataSourceMapping The list of event digests. + */ + static void invalidateKeys(Cache, ?> cache, Function> getKeys, List>> itemDataSourceMapping) { + ConcurrentMap, ?> concurrentMap = cache.asMap(); + concurrentMap.forEach((k, v) -> { + Pair pairItems = getKeys.apply(k.getParamData()); + T searchParamsKey = pairItems.getLeft(); + Long searchParamsDsId = pairItems.getRight(); + for (Map> itemDsMapping : itemDataSourceMapping) { + Set dsIds = itemDsMapping.get(searchParamsKey); + if (dsIds != null && (searchParamsDsId == null || dsIds.contains(searchParamsDsId))) { + concurrentMap.remove(k); + } + } + }); + } + + /** + * Returns a set of tree events gathered from the TreeCounts instance after + * calling flushEvents. + * + * @param treeCounts The tree counts instance. + * @param converter The means of acquiring a tree item dto to be placed in + * the TreeEvent. + * + * @return The generated tree events. 
+ */ + static Set getIngestCompleteEvents(TreeCounts treeCounts, Function> converter) { + return treeCounts.flushEvents().stream() + .map(daoEvt -> new TreeEvent(converter.apply(daoEvt), true)) + .collect(Collectors.toSet()); + } + + /** + * Returns a set of tree events gathered from the TreeCounts instance after + * calling getEventTimeouts. + * + * @param treeCounts The tree counts instance. + * @param converter The means of acquiring a tree item dto to be placed in + * the TreeEvent. + * + * @return The generated tree events. + */ + static Set getRefreshEvents(TreeCounts treeCounts, Function> converter) { + return treeCounts.getEventTimeouts().stream() + .map(daoEvt -> new TreeEvent(converter.apply(daoEvt), true)) + .collect(Collectors.toSet()); + } +} From a6e6af0fce7e9ba1f532dc46daecc0fd32b36d15 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Fri, 3 Dec 2021 16:30:01 -0500 Subject: [PATCH 141/142] import fix --- .../autopsy/mainui/datamodel/AbstractDAO.java | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java index 19ffc70bf5..4ed213ced3 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AbstractDAO.java @@ -18,19 +18,9 @@ */ package org.sleuthkit.autopsy.mainui.datamodel; -import com.google.common.cache.Cache; import org.sleuthkit.autopsy.mainui.datamodel.events.DAOEvent; import java.beans.PropertyChangeEvent; -import java.util.Collections; -import java.util.List; -import java.util.Map; import java.util.Set; -import java.util.concurrent.ConcurrentMap; -import java.util.function.Function; -import java.util.stream.Collectors; -import org.apache.commons.lang3.tuple.Pair; -import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; -import org.sleuthkit.autopsy.mainui.datamodel.events.TreeCounts; import 
org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; /** From 35b3d57cc668a1831c8547a8187f71c79d01b593 Mon Sep 17 00:00:00 2001 From: Greg DiCristofaro Date: Mon, 6 Dec 2021 11:56:39 -0500 Subject: [PATCH 142/142] account icon fix --- .../mainui/nodes/DataArtifactTypeFactory.java | 40 +------------------ 1 file changed, 2 insertions(+), 38 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java index acace1f106..4ecbe55a08 100644 --- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java +++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/DataArtifactTypeFactory.java @@ -24,13 +24,13 @@ import org.openide.nodes.Children; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.mainui.datamodel.CommAccountsSearchParams; import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent; +import org.sleuthkit.autopsy.datamodel.accounts.Accounts; import org.sleuthkit.autopsy.datamodel.utils.IconsUtil; import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactDAO; import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactSearchParam; import org.sleuthkit.autopsy.mainui.datamodel.MainDAO; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO; import static org.sleuthkit.autopsy.mainui.nodes.TreeNode.getDefaultLookup; -import org.sleuthkit.datamodel.Account; import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO; import org.sleuthkit.autopsy.mainui.datamodel.events.TreeEvent; import org.sleuthkit.datamodel.BlackboardArtifact; @@ -215,42 +215,6 @@ public class DataArtifactTypeFactory extends TreeChildFactory { - - private static final String ICON_BASE_PATH = "org/sleuthkit/autopsy/images/"; //NON-NLS - - /** - * Get the path of the icon for the given Account Type. - * - * @return The path of the icon for the given Account Type. 
- */ - public static String getAccountIconPath(Account.Type accountType) { - - if (accountType.equals(Account.Type.CREDIT_CARD)) { - return ICON_BASE_PATH + "credit-card.png"; - } else if (accountType.equals(Account.Type.DEVICE)) { - return ICON_BASE_PATH + "image.png"; - } else if (accountType.equals(Account.Type.EMAIL)) { - return ICON_BASE_PATH + "email.png"; - } else if (accountType.equals(Account.Type.FACEBOOK)) { - return ICON_BASE_PATH + "facebook.png"; - } else if (accountType.equals(Account.Type.INSTAGRAM)) { - return ICON_BASE_PATH + "instagram.png"; - } else if (accountType.equals(Account.Type.MESSAGING_APP)) { - return ICON_BASE_PATH + "messaging.png"; - } else if (accountType.equals(Account.Type.PHONE)) { - return ICON_BASE_PATH + "phone.png"; - } else if (accountType.equals(Account.Type.TWITTER)) { - return ICON_BASE_PATH + "twitter.png"; - } else if (accountType.equals(Account.Type.WEBSITE)) { - return ICON_BASE_PATH + "web-file.png"; - } else if (accountType.equals(Account.Type.WHATSAPP)) { - return ICON_BASE_PATH + "WhatsApp.png"; - } else if (accountType.equals(Account.Type.CREDIT_CARD)) { - return ICON_BASE_PATH + "credit-cards.png"; - } else { - return ICON_BASE_PATH + "face.png"; - } - } /** * Main constructor. @@ -259,7 +223,7 @@ public class DataArtifactTypeFactory extends TreeChildFactory itemData) { super(itemData.getSearchParams().getType().getTypeName(), - getAccountIconPath(itemData.getSearchParams().getType()), + Accounts.getIconFilePath(itemData.getSearchParams().getType()), itemData); }