Merge branch 'collaborative' of https://github.com/sleuthkit/autopsy into hostname_not_ip

This commit is contained in:
Karl Mortensen 2015-07-30 10:22:58 -04:00
commit 5aba79120d
3 changed files with 79 additions and 13 deletions

View File

@ -57,6 +57,8 @@ public abstract class AbstractAbstractFileNode<T extends AbstractFile> extends A
if (dotIndex > 0) { if (dotIndex > 0) {
String ext = name.substring(dotIndex).toLowerCase(); String ext = name.substring(dotIndex).toLowerCase();
// If this is an archive file we will listen for ingest events
// that will notify us when new content has been identified.
for (String s : FileTypeExtensions.getArchiveExtensions()) { for (String s : FileTypeExtensions.getArchiveExtensions()) {
if (ext.equals(s)) { if (ext.equals(s)) {
IngestManager.getInstance().addIngestModuleEventListener(pcl); IngestManager.getInstance().addIngestModuleEventListener(pcl);
@ -69,7 +71,7 @@ public abstract class AbstractAbstractFileNode<T extends AbstractFile> extends A
private final PropertyChangeListener pcl = (PropertyChangeEvent evt) -> { private final PropertyChangeListener pcl = (PropertyChangeEvent evt) -> {
String eventType = evt.getPropertyName(); String eventType = evt.getPropertyName();
// See if the new file is a child of ours // Is this a content changed event?
if (eventType.equals(IngestManager.IngestModuleEvent.CONTENT_CHANGED.toString())) { if (eventType.equals(IngestManager.IngestModuleEvent.CONTENT_CHANGED.toString())) {
if ((evt.getOldValue() instanceof ModuleContentEvent) == false) { if ((evt.getOldValue() instanceof ModuleContentEvent) == false) {
return; return;
@ -79,13 +81,22 @@ public abstract class AbstractAbstractFileNode<T extends AbstractFile> extends A
return; return;
} }
Content newContent = (Content) moduleContentEvent.getSource(); Content newContent = (Content) moduleContentEvent.getSource();
// Does the event indicate that content has been added to *this* file?
if (getContent().getId() == newContent.getId()) { if (getContent().getId() == newContent.getId()) {
// If so, refresh our children.
try {
Children parentsChildren = getParentNode().getChildren(); Children parentsChildren = getParentNode().getChildren();
if (parentsChildren != null) { if (parentsChildren != null) {
((ContentChildren)parentsChildren).refreshChildren(); ((ContentChildren)parentsChildren).refreshChildren();
parentsChildren.getNodesCount(); parentsChildren.getNodesCount();
} }
} }
catch (NullPointerException ex) {
// Skip
}
}
} }
}; };

View File

@ -18,13 +18,21 @@
*/ */
package org.sleuthkit.autopsy.datamodel; package org.sleuthkit.autopsy.datamodel;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import javax.swing.Action; import javax.swing.Action;
import org.openide.nodes.Children;
import org.openide.nodes.Sheet; import org.openide.nodes.Sheet;
import org.openide.util.NbBundle; import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.directorytree.ExplorerNodeActionVisitor; import org.sleuthkit.autopsy.directorytree.ExplorerNodeActionVisitor;
import org.sleuthkit.autopsy.directorytree.NewWindowViewAction; import org.sleuthkit.autopsy.directorytree.NewWindowViewAction;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.ingest.ModuleContentEvent;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.VirtualDirectory;
import org.sleuthkit.datamodel.Volume; import org.sleuthkit.datamodel.Volume;
/** /**
@ -59,8 +67,47 @@ public class VolumeNode extends AbstractContentNode<Volume> {
this.setDisplayName(tempVolName); this.setDisplayName(tempVolName);
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/vol-icon.png"); //NON-NLS this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/vol-icon.png"); //NON-NLS
// Listen for ingest events so that we can detect new added files (e.g. carved)
IngestManager.getInstance().addIngestModuleEventListener(pcl);
} }
private final PropertyChangeListener pcl = (PropertyChangeEvent evt) -> {
String eventType = evt.getPropertyName();
// See if the new file is a child of ours
if (eventType.equals(IngestManager.IngestModuleEvent.CONTENT_CHANGED.toString())) {
if ((evt.getOldValue() instanceof ModuleContentEvent) == false) {
return;
}
ModuleContentEvent moduleContentEvent = (ModuleContentEvent) evt.getOldValue();
if ((moduleContentEvent.getSource() instanceof Content) == false) {
return;
}
Content newContent = (Content) moduleContentEvent.getSource();
try {
Content parent = newContent.getParent();
if (parent != null) {
// Is this a new carved file?
if (parent.getName().equals(VirtualDirectory.NAME_CARVED)) {
// Was this new carved file produced from this volume?
if (parent.getParent().getId() == getContent().getId()) {
Children children = getChildren();
if (children != null) {
((ContentChildren)children).refreshChildren();
children.getNodesCount();
}
}
}
}
}
catch (TskCoreException ex) {
// Do nothing.
}
}
};
/** /**
* Right click action for volume node * Right click action for volume node
* *

View File

@ -248,19 +248,27 @@ final class IngestTasksScheduler {
} }
/** /**
* Clears the task scheduling queues for an ingest job, but does nothing * Clears the "upstream" task scheduling queues for an ingest job, but does
* about tasks that have already been taken by ingest threads. Those tasks * nothing about tasks that have already been shuffled into the concurrently
* will be flushed out when the ingest threads call back with their task * accessed blocking queues shared with the ingest threads. Note that tasks
* completed notifications. * in the "downstream" queues or already taken by the ingest threads will be
* flushed out when the ingest threads call back with their task completed
* notifications.
* *
* @param job The job for which the tasks are to be canceled. * @param job The job for which the tasks are to be canceled.
*/ */
synchronized void cancelPendingTasksForIngestJob(DataSourceIngestJob job) { synchronized void cancelPendingTasksForIngestJob(DataSourceIngestJob job) {
/**
* This code should not flush the blocking queues that are concurrently
* accessed by the ingest threads. This is because the "lock striping"
* and "weakly consistent" iterators of these collections make it so
* that this code could have a different view of the queues than the
* ingest threads. It does clean out the directory level tasks before
* they are exploded into file tasks.
*/
long jobId = job.getId(); long jobId = job.getId();
this.removeTasksForJob(this.rootDirectoryTasks, jobId); this.removeTasksForJob(this.rootDirectoryTasks, jobId);
this.removeTasksForJob(this.directoryTasks, jobId); this.removeTasksForJob(this.directoryTasks, jobId);
this.removeTasksForJob(this.pendingFileTasks, jobId);
this.removeTasksForJob(this.pendingDataSourceTasks, jobId);
this.shuffleFileTaskQueues(); this.shuffleFileTaskQueues();
} }
@ -469,7 +477,7 @@ final class IngestTasksScheduler {
* @param taskQueue The queue from which to remove the tasks. * @param taskQueue The queue from which to remove the tasks.
* @param jobId The id of the job for which the tasks are to be removed. * @param jobId The id of the job for which the tasks are to be removed.
*/ */
private void removeTasksForJob(Collection<? extends IngestTask> taskQueue, long jobId) { synchronized private void removeTasksForJob(Collection<? extends IngestTask> taskQueue, long jobId) {
Iterator<? extends IngestTask> iterator = taskQueue.iterator(); Iterator<? extends IngestTask> iterator = taskQueue.iterator();
while (iterator.hasNext()) { while (iterator.hasNext()) {
IngestTask task = iterator.next(); IngestTask task = iterator.next();