diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/AbstractAbstractFileNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/AbstractAbstractFileNode.java
index 21ac5abea0..bd133ab4c2 100644
--- a/Core/src/org/sleuthkit/autopsy/datamodel/AbstractAbstractFileNode.java
+++ b/Core/src/org/sleuthkit/autopsy/datamodel/AbstractAbstractFileNode.java
@@ -57,6 +57,8 @@ public abstract class AbstractAbstractFileNode extends A
 
         if (dotIndex > 0) {
             String ext = name.substring(dotIndex).toLowerCase();
+            // If this is an archive file, we will listen for ingest events
+            // that will notify us when new content has been identified.
             for (String s : FileTypeExtensions.getArchiveExtensions()) {
                 if (ext.equals(s)) {
                     IngestManager.getInstance().addIngestModuleEventListener(pcl);
@@ -69,7 +71,7 @@ public abstract class AbstractAbstractFileNode extends A
     private final PropertyChangeListener pcl = (PropertyChangeEvent evt) -> {
         String eventType = evt.getPropertyName();
 
-        // See if the new file is a child of ours
+        // Is this a content changed event?
         if (eventType.equals(IngestManager.IngestModuleEvent.CONTENT_CHANGED.toString())) {
             if ((evt.getOldValue() instanceof ModuleContentEvent) == false) {
                 return;
@@ -79,16 +81,25 @@ public abstract class AbstractAbstractFileNode extends A
                 return;
             }
             Content newContent = (Content) moduleContentEvent.getSource();
+
+            // Does the event indicate that content has been added to *this* file?
             if (getContent().getId() == newContent.getId()) {
-                Children parentsChildren = getParentNode().getChildren();
-                if (parentsChildren != null) {
-                    ((ContentChildren)parentsChildren).refreshChildren();
-                    parentsChildren.getNodesCount();
+                // If so, refresh our children.
+                try {
+                    Children parentsChildren = getParentNode().getChildren();
+                    if (parentsChildren != null) {
+                        ((ContentChildren)parentsChildren).refreshChildren();
+                        parentsChildren.getNodesCount();
+                    }
                 }
+                catch (NullPointerException ex) {
+                    // Skip the refresh; getParentNode() can return null when this node has no parent.
+                }
+
             }
         }
     };
-    
+
     // Note: this order matters for the search result, changed it if the order of property headers on the "KeywordSearchNode"changed
     public static enum AbstractFilePropertyType {
diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/VolumeNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/VolumeNode.java
index 36bda58844..18bc5f9097 100644
--- a/Core/src/org/sleuthkit/autopsy/datamodel/VolumeNode.java
+++ b/Core/src/org/sleuthkit/autopsy/datamodel/VolumeNode.java
@@ -18,13 +18,21 @@
  */
 package org.sleuthkit.autopsy.datamodel;
 
+import java.beans.PropertyChangeEvent;
+import java.beans.PropertyChangeListener;
 import java.util.ArrayList;
 import java.util.List;
 import javax.swing.Action;
+import org.openide.nodes.Children;
 import org.openide.nodes.Sheet;
 import org.openide.util.NbBundle;
 import org.sleuthkit.autopsy.directorytree.ExplorerNodeActionVisitor;
 import org.sleuthkit.autopsy.directorytree.NewWindowViewAction;
+import org.sleuthkit.autopsy.ingest.IngestManager;
+import org.sleuthkit.autopsy.ingest.ModuleContentEvent;
+import org.sleuthkit.datamodel.Content;
+import org.sleuthkit.datamodel.TskCoreException;
+import org.sleuthkit.datamodel.VirtualDirectory;
 import org.sleuthkit.datamodel.Volume;
 
 /**
@@ -59,8 +67,47 @@ public class VolumeNode extends AbstractContentNode {
         this.setDisplayName(tempVolName);
         this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/vol-icon.png"); //NON-NLS
 
+        // Listen for ingest events so that we can detect newly added files (e.g. carved files)
+        IngestManager.getInstance().addIngestModuleEventListener(pcl);
+
     }
 
+    private final PropertyChangeListener pcl = (PropertyChangeEvent evt) -> {
+        String eventType = evt.getPropertyName();
+
+        // See if the new file is a child of ours
+        if (eventType.equals(IngestManager.IngestModuleEvent.CONTENT_CHANGED.toString())) {
+            if ((evt.getOldValue() instanceof ModuleContentEvent) == false) {
+                return;
+            }
+            ModuleContentEvent moduleContentEvent = (ModuleContentEvent) evt.getOldValue();
+            if ((moduleContentEvent.getSource() instanceof Content) == false) {
+                return;
+            }
+            Content newContent = (Content) moduleContentEvent.getSource();
+
+            try {
+                Content parent = newContent.getParent();
+                if (parent != null) {
+                    // Is this a new carved file?
+                    if (parent.getName().equals(VirtualDirectory.NAME_CARVED)) {
+                        // Was this new carved file produced from this volume?
+                        if (parent.getParent().getId() == getContent().getId()) {
+                            Children children = getChildren();
+                            if (children != null) {
+                                ((ContentChildren)children).refreshChildren();
+                                children.getNodesCount();
+                            }
+                        }
+                    }
+                }
+            }
+            catch (TskCoreException ex) {
+                // Do nothing; if the parent chain cannot be queried, there is nothing to refresh.
+            }
+        }
+    };
+
     /**
      * Right click action for volume node
      *
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestTasksScheduler.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestTasksScheduler.java
index f00248ecef..20612d7a6a 100755
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestTasksScheduler.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestTasksScheduler.java
@@ -248,19 +248,27 @@ final class IngestTasksScheduler {
     }
 
     /**
-     * Clears the task scheduling queues for an ingest job, but does nothing
-     * about tasks that have already been taken by ingest threads. Those tasks
-     * will be flushed out when the ingest threads call back with their task
-     * completed notifications.
+     * Clears the "upstream" task scheduling queues for an ingest job, but does
+     * nothing about tasks that have already been shuffled into the concurrently
+     * accessed blocking queues shared with the ingest threads. Note that tasks
+     * in the "downstream" queues or already taken by the ingest threads will be
+     * flushed out when the ingest threads call back with their task completed
+     * notifications.
      *
      * @param job The job for which the tasks are to to canceled.
      */
     synchronized void cancelPendingTasksForIngestJob(DataSourceIngestJob job) {
+        /**
+         * This code should not flush the blocking queues that are concurrently
+         * accessed by the ingest threads. This is because the "lock striping"
+         * and "weakly consistent" iterators of these collections make it so
+         * that this code could have a different view of the queues than the
+         * ingest threads. It does clean out the directory level tasks before
+         * they are exploded into file tasks.
+         */
        long jobId = job.getId();
        this.removeTasksForJob(this.rootDirectoryTasks, jobId);
        this.removeTasksForJob(this.directoryTasks, jobId);
-       this.removeTasksForJob(this.pendingFileTasks, jobId);
-       this.removeTasksForJob(this.pendingDataSourceTasks, jobId);
        this.shuffleFileTaskQueues();
     }
 
@@ -469,7 +477,7 @@ final class IngestTasksScheduler {
      * @param taskQueue The queue from which to remove the tasks.
      * @param jobId     The id of the job for which the tasks are to be removed.
      */
-    private void removeTasksForJob(Collection taskQueue, long jobId) {
+    synchronized private void removeTasksForJob(Collection taskQueue, long jobId) {
         Iterator iterator = taskQueue.iterator();
         while (iterator.hasNext()) {
             IngestTask task = iterator.next();
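
Background on the event plumbing used by the two new listeners above: Autopsy delivers ingest module events through the standard java.beans property-change interfaces, with the event type carried in the property name and the ModuleContentEvent payload carried in the event's old-value slot, which is why both listeners read evt.getOldValue() rather than evt.getNewValue(). The stand-alone sketch below reproduces that shape using only the JDK; IngestEventDemo and ContentPayload are hypothetical stand-ins, not Autopsy classes, and this illustrates the mechanism rather than the actual IngestManager implementation.

import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;

public class IngestEventDemo {

    // Hypothetical stand-in for ModuleContentEvent: wraps the object id of the
    // content that an ingest module has added or changed.
    static final class ContentPayload {
        final long contentId;

        ContentPayload(long contentId) {
            this.contentId = contentId;
        }
    }

    public static void main(String[] args) {
        PropertyChangeSupport publisher = new PropertyChangeSupport(new Object());

        // Listener written in the same style as the ones added in this patch:
        // check the event name first, then unpack the payload from the
        // old-value slot and decide whether it concerns this node's content.
        PropertyChangeListener pcl = (PropertyChangeEvent evt) -> {
            if ("CONTENT_CHANGED".equals(evt.getPropertyName())) {
                if (evt.getOldValue() instanceof ContentPayload) {
                    ContentPayload payload = (ContentPayload) evt.getOldValue();
                    System.out.println("Refresh children for content id " + payload.contentId);
                }
            }
        };
        publisher.addPropertyChangeListener(pcl);

        // Publisher side: the payload rides in the "old value" argument and the
        // "new value" stays null, matching how the listeners above unpack it.
        publisher.firePropertyChange("CONTENT_CHANGED", new ContentPayload(42L), null);
    }
}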
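
On the IngestTasksScheduler change: the pending file and data source task queues are no longer swept during cancellation because those queues are consumed concurrently by the ingest threads, and the iterators of java.util.concurrent collections such as LinkedBlockingQueue are only weakly consistent, so a cancellation sweep and a concurrent take can see different views of the same queue. The stand-alone sketch below (plain JDK; CancelSweepDemo and the Long "tasks" are hypothetical stand-ins, not the scheduler's real types) shows the shape of the race: even while a sweep is removing a cancelled job's tasks, a worker can still be handed one of them, so such tasks must be discarded when their completion is reported, which is what the updated javadoc describes.

import java.util.Iterator;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

public class CancelSweepDemo {

    public static void main(String[] args) throws InterruptedException {
        // Each queued Long stands in for a pending file task; its value is the
        // id of the ingest job that scheduled it.
        LinkedBlockingQueue<Long> pendingFileTasks = new LinkedBlockingQueue<>();
        for (int i = 0; i < 100_000; i++) {
            pendingFileTasks.add(i % 2 == 0 ? 1L : 2L); // tasks for jobs 1 and 2
        }

        AtomicInteger cancelledTasksSeenByWorker = new AtomicInteger();

        // Plays the role of an ingest thread: it keeps taking tasks while the
        // cancellation sweep below runs on the main thread.
        Thread worker = new Thread(() -> {
            try {
                Long jobId;
                while ((jobId = pendingFileTasks.poll(100, TimeUnit.MILLISECONDS)) != null) {
                    if (jobId == 1L) {
                        // A task for the "cancelled" job slipped past the sweep;
                        // the real scheduler flushes such tasks out via the
                        // task-completed notification, not via the sweep.
                        cancelledTasksSeenByWorker.incrementAndGet();
                    }
                }
            } catch (InterruptedException ignored) {
            }
        });
        worker.start();

        // "Cancel" job 1 by sweeping the shared queue with its weakly
        // consistent iterator. This cannot guarantee that the worker never
        // receives a job-1 task.
        Iterator<Long> it = pendingFileTasks.iterator();
        while (it.hasNext()) {
            if (it.next() == 1L) {
                it.remove();
            }
        }

        worker.join();
        System.out.println("Job-1 tasks that still reached the worker: "
                + cancelledTasksSeenByWorker.get());
    }
}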