Mirror of https://github.com/overcuriousity/autopsy-flatpak.git (synced 2025-07-19 11:07:43 +00:00)
Merge branch 'collaborative' of https://github.com/sleuthkit/autopsy into hostname_not_ip
This commit is contained in:
commit 5aba79120d
@@ -57,6 +57,8 @@ public abstract class AbstractAbstractFileNode<T extends AbstractFile> extends A
        if (dotIndex > 0) {
            String ext = name.substring(dotIndex).toLowerCase();

            // If this is an archive file we will listen for ingest events
            // that will notify us when new content has been identified.
            for (String s : FileTypeExtensions.getArchiveExtensions()) {
                if (ext.equals(s)) {
                    IngestManager.getInstance().addIngestModuleEventListener(pcl);
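The comment above describes the pattern this commit relies on: the node registers a PropertyChangeListener with IngestManager and reacts only to the event types it cares about. The next two hunks show the real listener; as a standalone illustration (not Autopsy code), the sketch below shows the same filter-by-property-name shape with plain java.beans classes. The event source and the "CONTENT_CHANGED" string are stand-ins for IngestManager and IngestManager.IngestModuleEvent.CONTENT_CHANGED.

import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;

public class ListenerPatternSketch {

    public static void main(String[] args) {
        // Stand-in event source; in Autopsy this role is played by IngestManager.
        PropertyChangeSupport events = new PropertyChangeSupport(new Object());

        // Same shape as the pcl field in the diff: filter on the property name and
        // ignore everything else. The payload travels in the "old value" slot,
        // mirroring how the node code reads evt.getOldValue().
        PropertyChangeListener pcl = (PropertyChangeEvent evt) -> {
            if ("CONTENT_CHANGED".equals(evt.getPropertyName())) {
                System.out.println("would refresh children, payload: " + evt.getOldValue());
            }
        };
        events.addPropertyChangeListener(pcl);

        events.firePropertyChange("CONTENT_CHANGED", "module-content-event", null);
        events.firePropertyChange("SOME_OTHER_EVENT", "ignored", null);
    }
}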
@@ -69,7 +71,7 @@ public abstract class AbstractAbstractFileNode<T extends AbstractFile> extends A
    private final PropertyChangeListener pcl = (PropertyChangeEvent evt) -> {
        String eventType = evt.getPropertyName();

        // See if the new file is a child of ours
        // Is this a content changed event?
        if (eventType.equals(IngestManager.IngestModuleEvent.CONTENT_CHANGED.toString())) {
            if ((evt.getOldValue() instanceof ModuleContentEvent) == false) {
                return;
@@ -79,16 +81,25 @@ public abstract class AbstractAbstractFileNode<T extends AbstractFile> extends A
                return;
            }
            Content newContent = (Content) moduleContentEvent.getSource();

            // Does the event indicate that content has been added to *this* file?
            if (getContent().getId() == newContent.getId()) {
                Children parentsChildren = getParentNode().getChildren();
                if (parentsChildren != null) {
                    ((ContentChildren)parentsChildren).refreshChildren();
                    parentsChildren.getNodesCount();
                // If so, refresh our children.
                try {
                    Children parentsChildren = getParentNode().getChildren();
                    if (parentsChildren != null) {
                        ((ContentChildren)parentsChildren).refreshChildren();
                        parentsChildren.getNodesCount();
                    }
                }
                catch (NullPointerException ex) {
                    // Skip
                }

            }
        }
    };


    // Note: this order matters for the search result; change it if the order of property headers on the "KeywordSearchNode" changed
    public static enum AbstractFilePropertyType {
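The new code above reaches through getParentNode() inside a try/catch for NullPointerException, since the node may have no parent when the event arrives. Purely as an illustration of that design choice (not part of the commit), an equivalent guard with explicit null and type checks could look like the sketch below. Node and Children are the NetBeans classes already imported in this file, and ContentChildren is the Autopsy children implementation the diff casts to, assumed to be in the same package.

import org.openide.nodes.Children;
import org.openide.nodes.Node;

final class NodeRefreshSketch {

    private NodeRefreshSketch() {
    }

    // Hypothetical helper: refresh a node's siblings without catching NullPointerException.
    static void refreshParentChildren(Node node) {
        Node parent = node.getParentNode();   // null when the node is not attached to a parent
        if (parent == null) {
            return;
        }
        Children parentsChildren = parent.getChildren();
        if (parentsChildren instanceof ContentChildren) {
            ((ContentChildren) parentsChildren).refreshChildren();
            parentsChildren.getNodesCount();  // forces the child nodes to be recalculated, as in the diff
        }
    }
}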
@@ -18,13 +18,21 @@
 */
package org.sleuthkit.autopsy.datamodel;

import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.ArrayList;
import java.util.List;
import javax.swing.Action;
import org.openide.nodes.Children;
import org.openide.nodes.Sheet;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.directorytree.ExplorerNodeActionVisitor;
import org.sleuthkit.autopsy.directorytree.NewWindowViewAction;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.ingest.ModuleContentEvent;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.VirtualDirectory;
import org.sleuthkit.datamodel.Volume;

/**
@@ -59,8 +67,47 @@ public class VolumeNode extends AbstractContentNode<Volume> {
        this.setDisplayName(tempVolName);

        this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/vol-icon.png"); //NON-NLS
        // Listen for ingest events so that we can detect new added files (e.g. carved)
        IngestManager.getInstance().addIngestModuleEventListener(pcl);

    }

    private final PropertyChangeListener pcl = (PropertyChangeEvent evt) -> {
        String eventType = evt.getPropertyName();

        // See if the new file is a child of ours
        if (eventType.equals(IngestManager.IngestModuleEvent.CONTENT_CHANGED.toString())) {
            if ((evt.getOldValue() instanceof ModuleContentEvent) == false) {
                return;
            }
            ModuleContentEvent moduleContentEvent = (ModuleContentEvent) evt.getOldValue();
            if ((moduleContentEvent.getSource() instanceof Content) == false) {
                return;
            }
            Content newContent = (Content) moduleContentEvent.getSource();

            try {
                Content parent = newContent.getParent();
                if (parent != null) {
                    // Is this a new carved file?
                    if (parent.getName().equals(VirtualDirectory.NAME_CARVED)) {
                        // Was this new carved file produced from this volume?
                        if (parent.getParent().getId() == getContent().getId()) {
                            Children children = getChildren();
                            if (children != null) {
                                ((ContentChildren)children).refreshChildren();
                                children.getNodesCount();
                            }
                        }
                    }
                }
            }
            catch (TskCoreException ex) {
                // Do nothing.
            }
        }
    };

    /**
     * Right click action for volume node
     *
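The listener above walks two levels up the parent chain: a new carved file lives in a carved-files virtual directory whose own parent should be this volume. As a compact restatement of that check (an illustrative helper, not code from this commit), the condition could be factored out as below, using only the sleuthkit datamodel types the file already imports.

import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.VirtualDirectory;

final class CarvedFileCheck {

    private CarvedFileCheck() {
    }

    // Hypothetical helper: does newContent sit in a carved-files directory directly
    // under the volume with object id volumeObjId?
    static boolean isCarvedFileOfVolume(Content newContent, long volumeObjId) {
        try {
            Content parent = newContent.getParent();
            if (parent == null || !parent.getName().equals(VirtualDirectory.NAME_CARVED)) {
                return false;   // not inside a carved-files virtual directory
            }
            Content grandParent = parent.getParent();
            return grandParent != null && grandParent.getId() == volumeObjId;
        } catch (TskCoreException ex) {
            return false;       // could not resolve the parent chain; treat as not ours
        }
    }
}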
@@ -248,19 +248,27 @@ final class IngestTasksScheduler {
    }

    /**
     * Clears the task scheduling queues for an ingest job, but does nothing
     * about tasks that have already been taken by ingest threads. Those tasks
     * will be flushed out when the ingest threads call back with their task
     * completed notifications.
     * Clears the "upstream" task scheduling queues for an ingest job, but does
     * nothing about tasks that have already been shuffled into the concurrently
     * accessed blocking queues shared with the ingest threads. Note that tasks
     * in the "downstream" queues or already taken by the ingest threads will be
     * flushed out when the ingest threads call back with their task completed
     * notifications.
     *
     * @param job The job for which the tasks are to be canceled.
     */
    synchronized void cancelPendingTasksForIngestJob(DataSourceIngestJob job) {
        /**
         * This code should not flush the blocking queues that are concurrently
         * accessed by the ingest threads. This is because the "lock striping"
         * and "weakly consistent" iterators of these collections make it so
         * that this code could have a different view of the queues than the
         * ingest threads. It does clean out the directory level tasks before
         * they are exploded into file tasks.
         */
        long jobId = job.getId();
        this.removeTasksForJob(this.rootDirectoryTasks, jobId);
        this.removeTasksForJob(this.directoryTasks, jobId);
        this.removeTasksForJob(this.pendingFileTasks, jobId);
        this.removeTasksForJob(this.pendingDataSourceTasks, jobId);
        this.shuffleFileTaskQueues();
    }

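The block comment above is the heart of this change: the scheduler only sweeps its own "upstream" queues because the concurrent queues shared with the ingest threads expose weakly consistent iterators, so a sweep there could silently miss tasks. The self-contained sketch below (not Autopsy code; the Task class and jobId field are invented for the demo) shows that behaviour on a java.util.concurrent.LinkedBlockingQueue: the sweep removes whatever its iterator happens to see, with no guarantee about elements added concurrently.

import java.util.Iterator;
import java.util.concurrent.LinkedBlockingQueue;

public class WeaklyConsistentSweep {

    // Minimal stand-in for an ingest task tagged with its job id.
    static final class Task {
        final long jobId;

        Task(long jobId) {
            this.jobId = jobId;
        }
    }

    public static void main(String[] args) throws InterruptedException {
        LinkedBlockingQueue<Task> queue = new LinkedBlockingQueue<>();
        for (int i = 0; i < 100; i++) {
            queue.add(new Task(1));
        }

        // Another thread keeps adding job-1 tasks while the sweep runs.
        Thread producer = new Thread(() -> {
            for (int i = 0; i < 10_000; i++) {
                queue.add(new Task(1));
            }
        });
        producer.start();

        // Weakly consistent sweep: removes the job-1 tasks the iterator sees,
        // but tasks enqueued after iteration started may or may not be visited.
        Iterator<Task> iterator = queue.iterator();
        while (iterator.hasNext()) {
            if (iterator.next().jobId == 1) {
                iterator.remove();
            }
        }
        producer.join();

        // Typically non-zero: sweeping a shared concurrent queue cannot guarantee it ends up empty.
        System.out.println("job-1 tasks left after the sweep: " + queue.size());
    }
}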
@@ -469,7 +477,7 @@ final class IngestTasksScheduler {
     * @param taskQueue The queue from which to remove the tasks.
     * @param jobId The id of the job for which the tasks are to be removed.
     */
    private void removeTasksForJob(Collection<? extends IngestTask> taskQueue, long jobId) {
    synchronized private void removeTasksForJob(Collection<? extends IngestTask> taskQueue, long jobId) {
        Iterator<? extends IngestTask> iterator = taskQueue.iterator();
        while (iterator.hasNext()) {
            IngestTask task = iterator.next();
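The rendered diff stops here, mid-method, so the rest of removeTasksForJob() is not shown. Only as a hypothetical illustration of what the visible change buys (the added synchronized modifier serializes this sweep with the scheduler's other synchronized methods), a generic sweep of the same shape could look like the sketch below; the Task interface and getJobId() accessor are invented for the example and are not the IngestTask API.

import java.util.Collection;
import java.util.Iterator;

final class JobSweepSketch {

    // Invented stand-in for an ingest task; IngestTask exposes its job differently.
    interface Task {
        long getJobId();
    }

    // Guarded by this object's monitor, as the added synchronized modifier does
    // for the scheduler instance in the diff above.
    synchronized void removeTasksForJob(Collection<? extends Task> taskQueue, long jobId) {
        Iterator<? extends Task> iterator = taskQueue.iterator();
        while (iterator.hasNext()) {
            if (iterator.next().getJobId() == jobId) {
                iterator.remove();   // valid for the scheduler's non-concurrent "upstream" lists
            }
        }
    }
}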