Change data source ingest behavior

Richard Cordovano 2014-05-15 15:39:42 -04:00
parent db07da4a0e
commit 99fd68b1c5
25 changed files with 228 additions and 328 deletions

View File

@@ -29,7 +29,6 @@
  */
 package org.sleuthkit.autopsy.examples;
 
-import java.util.HashMap;
 import java.util.List;
 import java.util.logging.Level;
 import org.sleuthkit.autopsy.casemodule.Case;
@@ -46,42 +45,38 @@ import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
 import org.sleuthkit.autopsy.ingest.IngestJobContext;
 import org.sleuthkit.autopsy.ingest.IngestMessage;
-import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
 import org.sleuthkit.autopsy.ingest.IngestServices;
-import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
 import org.sleuthkit.datamodel.TskData;
 
 /**
  * Sample data source ingest module that doesn't do much. Demonstrates per
- * ingest job module settings, use of a subset of the available ingest services
- * and thread-safe sharing of per ingest job data.
+ * ingest job module settings, checking for job cancellation, updating the
+ * DataSourceIngestModuleProgress object, and use of a subset of the available
+ * ingest services.
  */
-class SampleDataSourceIngestModule extends IngestModuleAdapter implements DataSourceIngestModule {
+class SampleDataSourceIngestModule implements DataSourceIngestModule {
 
-    private static final HashMap<Long, Long> fileCountsForIngestJobs = new HashMap<>();
     private final boolean skipKnownFiles;
     private IngestJobContext context = null;
-    private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
 
     SampleDataSourceIngestModule(SampleModuleIngestJobSettings settings) {
         this.skipKnownFiles = settings.skipKnownFiles();
     }
 
     @Override
     public void startUp(IngestJobContext context) throws IngestModuleException {
         this.context = context;
-        // This method is thread-safe with per ingest job reference counted
-        // management of shared data.
-        initFileCount(context.getJobId());
     }
 
     @Override
     public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
-        // There are two tasks to do. Set the the progress bar to determinate
-        // and set the remaining number of work units to be completed to two.
+        if (context.isJobCancelled()) {
+            return IngestModule.ProcessResult.OK;
+        }
+
+        // There are two tasks to do.
         progressBar.switchToDeterminate(2);
 
         Case autopsyCase = Case.getCurrentCase();
         SleuthkitCase sleuthkitCase = autopsyCase.getSleuthkitCase();
         Services services = new Services(sleuthkitCase);
@@ -93,11 +88,14 @@ class SampleDataSourceIngestModule extends IngestModuleAdapter implements DataSourceIngestModule {
             for (AbstractFile docFile : docFiles) {
                 if (!skipKnownFiles || docFile.getKnown() != TskData.FileKnown.KNOWN) {
                     ++fileCount;
                 }
             }
             progressBar.progress(1);
 
+            if (context.isJobCancelled()) {
+                return IngestModule.ProcessResult.OK;
+            }
+
             // Get files by creation time.
             long currentTime = System.currentTimeMillis() / 1000;
             long minTime = currentTime - (14 * 24 * 60 * 60); // Go back two weeks.
@@ -105,16 +103,24 @@ class SampleDataSourceIngestModule extends IngestModuleAdapter implements DataSourceIngestModule {
             for (FsContent otherFile : otherFiles) {
                 if (!skipKnownFiles || otherFile.getKnown() != TskData.FileKnown.KNOWN) {
                     ++fileCount;
                 }
             }
-            // This method is thread-safe with per ingest job reference counted
-            // management of shared data.
-            addToFileCount(context.getJobId(), fileCount);
             progressBar.progress(1);
 
-            return IngestModule.ProcessResult.OK;
+            if (context.isJobCancelled()) {
+                return IngestModule.ProcessResult.OK;
+            }
+
+            // Post a message to the ingest messages in box.
+            String msgText = String.format("Found %d files", fileCount);
+            IngestMessage message = IngestMessage.createMessage(
+                    IngestMessage.MessageType.DATA,
+                    SampleIngestModuleFactory.getModuleName(),
+                    msgText);
+            IngestServices.getInstance().postMessage(message);
+
+            return IngestModule.ProcessResult.OK;
 
         } catch (TskCoreException ex) {
             IngestServices ingestServices = IngestServices.getInstance();
             Logger logger = ingestServices.getLogger(SampleIngestModuleFactory.getModuleName());
@@ -122,38 +128,4 @@ class SampleDataSourceIngestModule extends IngestModuleAdapter implements DataSourceIngestModule {
             return IngestModule.ProcessResult.ERROR;
         }
     }
-
-    @Override
-    public void shutDown(boolean ingestJobCancelled) {
-        // This method is thread-safe with per ingest job reference counted
-        // management of shared data.
-        postFileCount(context.getJobId());
-    }
-
-    synchronized static void initFileCount(long ingestJobId) {
-        Long refCount = refCounter.incrementAndGet(ingestJobId);
-        if (refCount == 1) {
-            fileCountsForIngestJobs.put(ingestJobId, 0L);
-        }
-    }
-
-    synchronized static void addToFileCount(long ingestJobId, long countToAdd) {
-        Long fileCount = fileCountsForIngestJobs.get(ingestJobId);
-        fileCount += countToAdd;
-        fileCountsForIngestJobs.put(ingestJobId, fileCount);
-    }
-
-    synchronized static void postFileCount(long ingestJobId) {
-        Long refCount = refCounter.decrementAndGet(ingestJobId);
-        if (refCount == 0) {
-            Long filesCount = fileCountsForIngestJobs.remove(ingestJobId);
-            String msgText = String.format("Found %d files", filesCount);
-            IngestMessage message = IngestMessage.createMessage(
-                    IngestMessage.MessageType.DATA,
-                    SampleIngestModuleFactory.getModuleName(),
-                    msgText);
-            IngestServices.getInstance().postMessage(message);
-        }
-    }
 }

View File

@@ -37,7 +37,6 @@ import org.sleuthkit.autopsy.ingest.FileIngestModule;
 import org.sleuthkit.autopsy.ingest.IngestModule;
 import org.sleuthkit.autopsy.ingest.IngestJobContext;
 import org.sleuthkit.autopsy.ingest.IngestMessage;
-import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
 import org.sleuthkit.autopsy.ingest.IngestServices;
 import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
 import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
@@ -54,7 +53,7 @@ import org.sleuthkit.datamodel.TskData;
  * module settings, use of a subset of the available ingest services and
  * thread-safe sharing of per ingest job data.
  */
-class SampleFileIngestModule extends IngestModuleAdapter implements FileIngestModule {
+class SampleFileIngestModule implements FileIngestModule {
 
     private static final HashMap<Long, Long> artifactCountsForIngestJobs = new HashMap<>();
     private static int attrId = -1;
@@ -159,21 +158,23 @@ class SampleFileIngestModule extends IngestModuleAdapter implements FileIngestModule {
     }
 
     @Override
-    public void shutDown(boolean ingestJobCancelled) {
-        // This method is thread-safe with per ingest job reference counted
-        // management of shared data.
-        reportBlackboardPostCount(context.getJobId());
+    public void shutDown() {
+        if (!context.isJobCancelled()) {
+            // This method is thread-safe with per ingest job reference counted
+            // management of shared data.
+            reportBlackboardPostCount(context.getJobId());
+        }
     }
 
     synchronized static void addToBlackboardPostCount(long ingestJobId, long countToAdd) {
         Long fileCount = artifactCountsForIngestJobs.get(ingestJobId);
 
         // Ensures that this job has an entry
         if (fileCount == null) {
             fileCount = 0L;
             artifactCountsForIngestJobs.put(ingestJobId, fileCount);
         }
 
         fileCount += countToAdd;
         artifactCountsForIngestJobs.put(ingestJobId, fileCount);
     }
@@ -189,5 +190,5 @@ class SampleFileIngestModule extends IngestModuleAdapter implements FileIngestModule {
                     msgText);
             IngestServices.getInstance().postMessage(message);
         }
     }
 }

View File

@@ -16,8 +16,9 @@ IngestMessagePanel.totalMessagesNameLabel.text=Total:
 IngestMessagePanel.totalMessagesNameVal.text=-
 IngestMessagePanel.totalUniqueMessagesNameLabel.text=Unique:
 IngestMessagePanel.totalUniqueMessagesNameVal.text=-
-IngestJob.progress.dataSourceIngest.displayName=Data Source Ingest of {0}
-IngestJob.progress.fileIngest.displayName=File Ingest of {0}
+IngestJob.progress.dataSourceIngest.initialDisplayName=Analyzing {0}
+IngestJob.progress.dataSourceIngest.displayName={0} for {1}
+IngestJob.progress.fileIngest.displayName=Analyzing files from {0}
 IngestJob.progress.cancelling={0} (Cancelling...)
 IngestJobConfigurationPanel.processUnallocCheckbox.toolTipText=Processes unallocated space, such as deleted files. Produces more complete results, but it may take longer to process on large images.
 IngestJobConfigurationPanel.processUnallocCheckbox.text=Process Unallocated Space

View File

@@ -26,11 +26,9 @@ import org.netbeans.api.progress.ProgressHandle;
 public class DataSourceIngestModuleProgress {
 
     private final ProgressHandle progress;
-    private final String moduleDisplayName;
 
-    DataSourceIngestModuleProgress(ProgressHandle progress, String moduleDisplayName) {
+    DataSourceIngestModuleProgress(ProgressHandle progress) {
         this.progress = progress;
-        this.moduleDisplayName = moduleDisplayName;
     }
 
     /**
@@ -60,6 +58,6 @@ public class DataSourceIngestModuleProgress {
      * @param workUnits Number of work units performed so far by the module.
      */
     public void progress(int workUnits) {
-        progress.progress(this.moduleDisplayName, workUnits);
+        progress.progress("", workUnits);
     }
 }

View File

@@ -23,6 +23,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import org.netbeans.api.progress.ProgressHandle;
+import org.openide.util.NbBundle;
 import org.sleuthkit.datamodel.Content;
 
 /**
@@ -79,7 +80,10 @@ final class DataSourceIngestPipeline {
         List<IngestModuleError> errors = new ArrayList<>();
         for (DataSourceIngestModuleDecorator module : this.modules) {
             try {
-                module.process(dataSource, new DataSourceIngestModuleProgress(progress, module.getDisplayName()));
+                progress.setDisplayName(NbBundle.getMessage(this.getClass(),
+                        "IngestJob.progress.dataSourceIngest.displayName",
+                        module.getDisplayName(), dataSource.getName()));
+                module.process(dataSource, new DataSourceIngestModuleProgress(progress));
             } catch (Exception ex) {
                 errors.add(new IngestModuleError(module.getDisplayName(), ex));
             }
@@ -90,18 +94,6 @@ final class DataSourceIngestPipeline {
         return errors;
     }
 
-    List<IngestModuleError> shutDown() {
-        List<IngestModuleError> errors = new ArrayList<>();
-        for (DataSourceIngestModuleDecorator module : this.modules) {
-            try {
-                module.shutDown(context.isJobCancelled());
-            } catch (Exception ex) {
-                errors.add(new IngestModuleError(module.getDisplayName(), ex));
-            }
-        }
-        return errors;
-    }
-
     private static class DataSourceIngestModuleDecorator implements DataSourceIngestModule {
 
         private final DataSourceIngestModule module;
@@ -129,10 +121,5 @@ final class DataSourceIngestPipeline {
         public IngestModule.ProcessResult process(Content dataSource, DataSourceIngestModuleProgress statusHelper) {
             return module.process(dataSource, statusHelper);
         }
-
-        @Override
-        public void shutDown(boolean ingestJobWasCancelled) {
-            module.shutDown(ingestJobWasCancelled);
-        }
     }
 }

View File

@@ -35,6 +35,6 @@ final class DataSourceIngestTask extends IngestTask {
     @Override
     void execute() throws InterruptedException {
-        getIngestJob().process(dataSource);
+        getIngestJob().process(this);
     }
 }

View File

@@ -21,17 +21,27 @@ package org.sleuthkit.autopsy.ingest;
 import org.sleuthkit.datamodel.AbstractFile;
 
 /**
- * Interface that must be implemented by all file ingest modules.
- * See description of IngestModule for more details on interface behavior.
+ * Interface that must be implemented by all file ingest modules. See
+ * description of IngestModule for more details on interface behavior.
  */
 public interface FileIngestModule extends IngestModule {
 
     /**
-     * Processes a file. Called between calls to startUp() and shutDown().
-     * Will be called for each file in a data source.
+     * Processes a file. Called between calls to startUp() and shutDown(). Will
+     * be called for each file in a data source.
      *
      * @param file The file to analyze.
      * @return A result code indicating success or failure of the processing.
     */
    ProcessResult process(AbstractFile file);
+
+    /**
+     * Invoked by Autopsy when an ingest job is completed (either because the
+     * data has been analyzed or because the job was canceled - check
+     * IngestJobContext.isJobCancelled()), before the ingest module instance is
+     * discarded. The module should respond by doing things like releasing
+     * private resources, submitting final results, and posting a final ingest
+     * message.
+     */
+    void shutDown();
 }
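shutDown() now lives on FileIngestModule and takes no arguments, so a module learns about cancellation from the IngestJobContext it captured in startUp(). A minimal sketch of a file ingest module written against the new interface follows; the class name and counter are illustrative, not part of this commit.

    import org.sleuthkit.autopsy.ingest.FileIngestModule;
    import org.sleuthkit.autopsy.ingest.IngestJobContext;
    import org.sleuthkit.datamodel.AbstractFile;

    // Hypothetical module showing the new no-argument shutDown() contract.
    class ExampleFileIngestModule implements FileIngestModule {

        private IngestJobContext context;
        private long fileCount = 0;

        @Override
        public void startUp(IngestJobContext context) throws IngestModuleException {
            // Keep the context so shutDown() can ask whether the job was canceled.
            this.context = context;
        }

        @Override
        public ProcessResult process(AbstractFile file) {
            ++fileCount;
            return ProcessResult.OK;
        }

        @Override
        public void shutDown() {
            // The boolean parameter is gone; query the context instead.
            if (!context.isJobCancelled()) {
                // Submit final results and post a summary message here.
            }
        }
    }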

View File

@@ -97,7 +97,7 @@ final class FileIngestPipeline {
         List<IngestModuleError> errors = new ArrayList<>();
         for (FileIngestModuleDecorator module : this.modules) {
             try {
-                module.shutDown(context.isJobCancelled());
+                module.shutDown();
             } catch (Exception ex) {
                 errors.add(new IngestModuleError(module.getDisplayName(), ex));
             }
@@ -134,8 +134,8 @@ final class FileIngestPipeline {
         }
 
         @Override
-        public void shutDown(boolean ingestJobWasCancelled) {
-            module.shutDown(ingestJobWasCancelled);
+        public void shutDown() {
+            module.shutDown();
         }
     }
 }

View File

@@ -36,7 +36,7 @@ final class FileIngestTask extends IngestTask {
     @Override
     void execute() throws InterruptedException {
-        getIngestJob().process(file);
+        getIngestJob().process(this);
     }
 
     @Override

View File

@@ -20,7 +20,9 @@ package org.sleuthkit.autopsy.ingest;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.atomic.AtomicLong;
 import java.util.logging.Level;
 import org.netbeans.api.progress.ProgressHandle;
 import org.netbeans.api.progress.ProgressHandleFactory;
@@ -33,21 +35,65 @@ import org.sleuthkit.datamodel.Content;
 final class IngestJob {
 
     private static final Logger logger = Logger.getLogger(IngestManager.class.getName());
+    private static final AtomicLong nextIngestJobId = new AtomicLong(0L);
+    private static final ConcurrentHashMap<Long, IngestJob> ingestJobsById = new ConcurrentHashMap<>();
+    private static final IngestScheduler taskScheduler = IngestScheduler.getInstance();
     private final long id;
-    private final Content rootDataSource;
+    private final Content dataSource;
     private final List<IngestModuleTemplate> ingestModuleTemplates;
     private final boolean processUnallocatedSpace;
-    private final LinkedBlockingQueue<DataSourceIngestPipeline> dataSourceIngestPipelines = new LinkedBlockingQueue<>();
     private final LinkedBlockingQueue<FileIngestPipeline> fileIngestPipelines = new LinkedBlockingQueue<>();
     private long estimatedFilesToProcess = 0L; // Guarded by this
     private long processedFiles = 0L; // Guarded by this
+    private DataSourceIngestPipeline dataSourceIngestPipeline;
     private ProgressHandle dataSourceTasksProgress;
     private ProgressHandle fileTasksProgress;
     private volatile boolean cancelled = false;
 
+    /**
+     * Creates an ingest job for a data source.
+     *
+     * @param dataSource The data source to ingest.
+     * @param ingestModuleTemplates The ingest module templates to use to create
+     * the ingest pipelines for the job.
+     * @param processUnallocatedSpace Whether or not the job should include
+     * processing of unallocated space.
+     * @return A collection of ingest module start up errors, empty on success.
+     * @throws InterruptedException
+     */
+    static List<IngestModuleError> startIngestJob(Content dataSource, List<IngestModuleTemplate> ingestModuleTemplates, boolean processUnallocatedSpace) throws InterruptedException {
+        long jobId = nextIngestJobId.incrementAndGet();
+        IngestJob job = new IngestJob(jobId, dataSource, ingestModuleTemplates, processUnallocatedSpace);
+        ingestJobsById.put(jobId, job);
+        IngestManager.getInstance().fireIngestJobStarted(jobId);
+        List<IngestModuleError> errors = job.start();
+        if (errors.isEmpty()) {
+            taskScheduler.scheduleTasksForIngestJob(job, dataSource);
+        } else {
+            ingestJobsById.remove(jobId);
+            IngestManager.getInstance().fireIngestJobCancelled(jobId);
+        }
+        return errors;
+    }
+
+    static boolean ingestJobsAreRunning() {
+        for (IngestJob job : ingestJobsById.values()) {
+            if (!job.isCancelled()) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    static void cancelAllIngestJobs() {
+        for (IngestJob job : ingestJobsById.values()) {
+            job.cancel();
+        }
+    }
+
     IngestJob(long id, Content dataSource, List<IngestModuleTemplate> ingestModuleTemplates, boolean processUnallocatedSpace) {
         this.id = id;
-        this.rootDataSource = dataSource;
+        this.dataSource = dataSource;
         this.ingestModuleTemplates = ingestModuleTemplates;
         this.processUnallocatedSpace = processUnallocatedSpace;
     }
@@ -60,7 +106,7 @@ final class IngestJob {
         return processUnallocatedSpace;
     }
 
-    List<IngestModuleError> startUp() throws InterruptedException {
+    List<IngestModuleError> start() throws InterruptedException {
         List<IngestModuleError> errors = startUpIngestPipelines();
         if (errors.isEmpty()) {
             startFileIngestProgressBar();
@@ -71,20 +117,12 @@
     private List<IngestModuleError> startUpIngestPipelines() throws InterruptedException {
         IngestJobContext context = new IngestJobContext(this);
+        dataSourceIngestPipeline = new DataSourceIngestPipeline(context, ingestModuleTemplates);
         List<IngestModuleError> errors = new ArrayList<>();
-        int numberOfPipelines = IngestManager.getInstance().getNumberOfDataSourceIngestThreads();
-        for (int i = 0; i < numberOfPipelines; ++i) {
-            DataSourceIngestPipeline pipeline = new DataSourceIngestPipeline(context, ingestModuleTemplates);
-            errors.addAll(pipeline.startUp());
-            dataSourceIngestPipelines.put(pipeline);
-            if (!errors.isEmpty()) {
-                // No need to accumulate presumably redundant errors.
-                break;
-            }
-        }
-        numberOfPipelines = IngestManager.getInstance().getNumberOfFileIngestThreads();
+        errors.addAll(dataSourceIngestPipeline.startUp());
+        int numberOfPipelines = IngestManager.getInstance().getNumberOfFileIngestThreads();
         for (int i = 0; i < numberOfPipelines; ++i) {
             FileIngestPipeline pipeline = new FileIngestPipeline(context, ingestModuleTemplates);
             errors.addAll(pipeline.startUp());
@@ -96,13 +134,13 @@
         }
 
         logIngestModuleErrors(errors);
-        return errors; // Returned so UI can report to user.
+        return errors;
     }
 
     private void startDataSourceIngestProgressBar() {
         final String displayName = NbBundle.getMessage(this.getClass(),
-                "IngestJob.progress.dataSourceIngest.displayName",
-                rootDataSource.getName());
+                "IngestJob.progress.dataSourceIngest.initialDisplayName",
+                dataSource.getName());
         dataSourceTasksProgress = ProgressHandleFactory.createHandle(displayName, new Cancellable() {
             @Override
             public boolean cancel() {
@@ -123,7 +161,7 @@
     private void startFileIngestProgressBar() {
         final String displayName = NbBundle.getMessage(this.getClass(),
                 "IngestJob.progress.fileIngest.displayName",
-                rootDataSource.getName());
+                dataSource.getName());
         fileTasksProgress = ProgressHandleFactory.createHandle(displayName, new Cancellable() {
             @Override
             public boolean cancel() {
@@ -136,32 +174,32 @@
                 return true;
             }
         });
-        estimatedFilesToProcess = rootDataSource.accept(new GetFilesCountVisitor());
+        estimatedFilesToProcess = dataSource.accept(new GetFilesCountVisitor());
         fileTasksProgress.start();
         fileTasksProgress.switchToDeterminate((int) estimatedFilesToProcess);
     }
 
-    void process(Content dataSource) throws InterruptedException {
+    void process(DataSourceIngestTask task) throws InterruptedException {
         // If the job is not cancelled, complete the task, otherwise just flush
-        // it. In either case, the task counter needs to be decremented and the
-        // shut down check needs to occur.
+        // it.
         if (!isCancelled()) {
             List<IngestModuleError> errors = new ArrayList<>();
-            DataSourceIngestPipeline pipeline = dataSourceIngestPipelines.take();
-            errors.addAll(pipeline.process(dataSource, dataSourceTasksProgress));
+            errors.addAll(dataSourceIngestPipeline.process(task.getDataSource(), dataSourceTasksProgress));
             if (!errors.isEmpty()) {
                 logIngestModuleErrors(errors);
             }
-            dataSourceIngestPipelines.put(pipeline);
+            dataSourceTasksProgress.finish();
+        }
+        if (taskScheduler.isLastTaskForIngestJob(task)) {
+            finish();
         }
     }
 
-    void process(AbstractFile file) throws InterruptedException {
+    void process(FileIngestTask task) throws InterruptedException {
         // If the job is not cancelled, complete the task, otherwise just flush
-        // it. In either case, the task counter needs to be decremented and the
-        // shut down check needs to occur.
+        // it.
         if (!isCancelled()) {
-            List<IngestModuleError> errors = new ArrayList<>();
+            AbstractFile file = task.getFile();
             synchronized (this) {
                 ++processedFiles;
                 if (processedFiles <= estimatedFilesToProcess) {
@@ -171,29 +209,33 @@
                 }
             }
             FileIngestPipeline pipeline = fileIngestPipelines.take();
+            List<IngestModuleError> errors = new ArrayList<>();
             errors.addAll(pipeline.process(file));
             fileIngestPipelines.put(pipeline);
             if (!errors.isEmpty()) {
                 logIngestModuleErrors(errors);
             }
         }
+        if (taskScheduler.isLastTaskForIngestJob(task)) {
+            finish();
+        }
     }
 
-    void shutDown() {
+    private void finish() {
         List<IngestModuleError> errors = new ArrayList<>();
-        while (!dataSourceIngestPipelines.isEmpty()) {
-            DataSourceIngestPipeline pipeline = dataSourceIngestPipelines.poll();
-            errors.addAll(pipeline.shutDown());
-        }
         while (!fileIngestPipelines.isEmpty()) {
             FileIngestPipeline pipeline = fileIngestPipelines.poll();
             errors.addAll(pipeline.shutDown());
         }
         fileTasksProgress.finish();
-        dataSourceTasksProgress.finish();
         if (!errors.isEmpty()) {
             logIngestModuleErrors(errors);
         }
+        ingestJobsById.remove(id);
+        if (!cancelled) {
+            IngestManager.getInstance().fireIngestJobCompleted(id);
+        }
     }
 
     private void logIngestModuleErrors(List<IngestModuleError> errors) {
@@ -208,7 +250,7 @@
     void cancel() {
         cancelled = true;
         fileTasksProgress.finish();
         dataSourceTasksProgress.finish();
         IngestManager.getInstance().fireIngestJobCancelled(id);
     }
} }

View File

@@ -60,7 +60,7 @@ public final class IngestJobContext {
      */
     public void addFiles(List<AbstractFile> files) {
         for (AbstractFile file : files) {
-            IngestScheduler.getInstance().addFileToIngestJob(ingestJob, file);
+            IngestScheduler.getInstance().addFileTaskToIngestJob(ingestJob, file);
         }
     }
 }
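The rename is internal to the scheduler; module authors still reach it through IngestJobContext.addFiles(), which queues a file ingest task for each derived file on the same job. A hedged sketch of the call from inside a module's process() method; the extraction helper is hypothetical.

    // Inside a FileIngestModule.process() implementation, after new files
    // have been derived from the file being processed:
    List<AbstractFile> derivedFiles = extractChildren(file); // hypothetical helper
    if (!derivedFiles.isEmpty()) {
        // Schedules the derived files for this job's file ingest pipelines.
        context.addFiles(derivedFiles);
    }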

View File

@@ -56,7 +56,7 @@ public class IngestManager {
     private final IngestMonitor ingestMonitor = new IngestMonitor();
     private final ExecutorService startIngestJobsThreadPool = Executors.newSingleThreadExecutor();
     private final ExecutorService dataSourceIngestThreadPool = Executors.newSingleThreadExecutor();
-    private final ExecutorService fileIngestThreadPool = Executors.newFixedThreadPool(MAX_NUMBER_OF_FILE_INGEST_THREADS);
+    private final ExecutorService fileIngestThreadPool;
     private final ExecutorService fireIngestEventsThreadPool = Executors.newSingleThreadExecutor();
     private final AtomicLong nextThreadId = new AtomicLong(0L);
     private final ConcurrentHashMap<Long, Future<Void>> startIngestJobThreads = new ConcurrentHashMap<>(); // Maps thread ids to cancellation handles.
@@ -80,11 +80,13 @@ public class IngestManager {
      */
     private IngestManager() {
         startDataSourceIngestThread();
 
         numberOfFileIngestThreads = UserPreferences.numberOfFileIngestThreads();
         if ((numberOfFileIngestThreads < MIN_NUMBER_OF_FILE_INGEST_THREADS) || (numberOfFileIngestThreads > MAX_NUMBER_OF_FILE_INGEST_THREADS)) {
             numberOfFileIngestThreads = DEFAULT_NUMBER_OF_FILE_INGEST_THREADS;
             UserPreferences.setNumberOfFileIngestThreads(numberOfFileIngestThreads);
         }
+        fileIngestThreadPool = Executors.newFixedThreadPool(numberOfFileIngestThreads);
         for (int i = 0; i < numberOfFileIngestThreads; ++i) {
             startFileIngestThread();
         }
@@ -165,7 +167,7 @@ public class IngestManager {
      * @return True if any ingest jobs are in progress, false otherwise.
      */
     public boolean isIngestRunning() {
-        return scheduler.ingestJobsAreRunning();
+        return IngestJob.ingestJobsAreRunning();
     }
 
     public void cancelAllIngestJobs() {
@@ -184,9 +186,8 @@ public class IngestManager {
             }
         }
 
-        // Cancel all the jobs already created. This will make the the ingest
-        // threads flush out any lingering ingest tasks without processing them.
-        scheduler.cancelAllIngestJobs();
+        // Cancel all the jobs already created.
+        IngestJob.cancelAllIngestJobs();
     }
 
     /**
@@ -432,7 +433,7 @@ public class IngestManager {
             }
 
             // Start an ingest job for the data source.
-            List<IngestModuleError> errors = scheduler.startIngestJob(dataSource, moduleTemplates, processUnallocatedSpace);
+            List<IngestModuleError> errors = IngestJob.startIngestJob(dataSource, moduleTemplates, processUnallocatedSpace);
             if (!errors.isEmpty()) {
                 // Report the errors to the user. They have already been logged.
                 StringBuilder moduleStartUpErrors = new StringBuilder();
@@ -489,7 +490,6 @@ public class IngestManager {
                 try {
                     IngestTask task = tasks.getNextTask(); // Blocks.
                     task.execute();
-                    scheduler.ingestTaskIsCompleted(task);
                 } catch (InterruptedException ex) {
                     break;
                 }

View File

@@ -20,25 +20,25 @@ package org.sleuthkit.autopsy.ingest;
 /**
  * The interface that must be implemented by all ingest modules.
  *
  * Autopsy will generally use several instances of an ingest module for each
  * ingest job it performs (one for each thread that it is using).
  *
- * Autopsy will call startUp() before any data is processed, will pass any
- * data to be analyzed into the process() method (FileIngestModule.process() or DataSourceIngestModule.process()),
- * and call shutDown() after
- * either all data is analyzed or the has has cancelled the job.
+ * Autopsy will call startUp() before any data is processed, will pass any data
+ * to be analyzed into the process() method (FileIngestModule.process() or
+ * DataSourceIngestModule.process()), and call shutDown() after either all data
+ * is analyzed or the user has canceled the job.
 *
  * Autopsy may use multiple threads to complete an ingest job, but it is
  * guaranteed that a module instance will always be called from a single thread.
  * Therefore, you can easily have thread-safe code by not using any static
  * member variables.
 *
- * If the module instances must share resources, the modules are
- * responsible for synchronizing access to the shared resources and doing
- * reference counting as required to release those resources correctly. Also,
- * more than one ingest job may be in progress at any given time. This must also
- * be taken into consideration when sharing resources between module instances.
+ * If the module instances must share resources, the modules are responsible for
+ * synchronizing access to the shared resources and doing reference counting as
+ * required to release those resources correctly. Also, more than one ingest job
+ * may be in progress at any given time. This must also be taken into
+ * consideration when sharing resources between module instances.
 *
- * TIP: An ingest module that does not require initialization or clean up may
- * extend the abstract IngestModuleAdapter class to get a default "do nothing"
@@ -71,29 +71,14 @@ public interface IngestModule {
     /**
      * Invoked by Autopsy to allow an ingest module instance to set up any
      * internal data structures and acquire any private resources it will need
-     * during an ingest job.
-     *
-     * If the module depends on loading any resources, it should do so in this
-     * method so that it can throw an exception in the case of an error and
-     * alert the user. Exceptions that are thrown from process() and shutDown()
-     * are logged, but do not stop processing of the data source.
-     *
-     * On error, throw a IngestModuleException.
+     * during an ingest job. If the module depends on loading any resources, it
+     * should do so in this method so that it can throw an exception in the case
+     * of an error and alert the user. Exceptions that are thrown from process()
+     * and shutDown() are logged, but do not stop processing of the data source.
      *
      * @param context Provides data and services specific to the ingest job and
      * the ingest pipeline of which the module is a part.
     * @throws org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException
     */
    void startUp(IngestJobContext context) throws IngestModuleException;
-
-    /**
-     * Invoked by Autopsy when an ingest job is completed (either because the
-     * data has been analyzed or because the job was cancelled), before the ingest
-     * module instance is discarded. The module should respond by doing things
-     * like releasing private resources, submitting final results, and posting a
-     * final ingest message.
-     * @param ingestJobWasCancelled True if this is being called because the user
-     * cancelled the job.
-     */
-    void shutDown(boolean ingestJobWasCancelled);
 }
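Because Autopsy confines each module instance to a single thread, per-instance fields need no locking; only state shared across instances does. A sketch of the reference-counted sharing pattern this javadoc describes, using the IngestModuleReferenceCounter seen in the sample modules above; the class and method names are illustrative.

    import java.util.HashMap;
    import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;

    // Hypothetical holder for totals shared by all instances of one module.
    class SharedJobTotals {

        // One entry per ingest job, shared by every instance of the module.
        private static final HashMap<Long, Long> totalsByJob = new HashMap<>();
        private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();

        // Call from startUp(): the first instance for a job initializes its total.
        static synchronized void startJob(long jobId) {
            if (refCounter.incrementAndGet(jobId) == 1) {
                totalsByJob.put(jobId, 0L);
            }
        }

        // Call from process(): accumulate under the class lock.
        static synchronized void add(long jobId, long count) {
            totalsByJob.put(jobId, totalsByJob.get(jobId) + count);
        }

        // Call from shutDown(): the last instance for a job removes and returns
        // the total; earlier instances get null.
        static synchronized Long finishJob(long jobId) {
            if (refCounter.decrementAndGet(jobId) == 0) {
                return totalsByJob.remove(jobId);
            }
            return null;
        }
    }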

View File

@ -1,33 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2014 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.ingest;
/**
* An adapter that provides a default implementation of the IngestModule
* interface.
*/
public abstract class IngestModuleAdapter implements IngestModule {
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
}
@Override
public void shutDown(boolean ingestJobCancelled) {
}
}

View File

@@ -23,9 +23,7 @@ import java.util.Collection;
 import java.util.Comparator;
 import java.util.List;
 import java.util.TreeSet;
-import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.atomic.AtomicLong;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import java.util.regex.Matcher;
@@ -42,7 +40,6 @@ final class IngestScheduler {
     private static final IngestScheduler instance = new IngestScheduler();
     private static final Logger logger = Logger.getLogger(IngestScheduler.class.getName());
     private static final int FAT_NTFS_FLAGS = TskData.TSK_FS_TYPE_ENUM.TSK_FS_TYPE_FAT12.getValue() | TskData.TSK_FS_TYPE_ENUM.TSK_FS_TYPE_FAT16.getValue() | TskData.TSK_FS_TYPE_ENUM.TSK_FS_TYPE_FAT32.getValue() | TskData.TSK_FS_TYPE_ENUM.TSK_FS_TYPE_NTFS.getValue();
-    private final ConcurrentHashMap<Long, IngestJob> ingestJobsById = new ConcurrentHashMap<>();
     private final LinkedBlockingQueue<DataSourceIngestTask> dataSourceTasks = new LinkedBlockingQueue<>();
     private final TreeSet<FileIngestTask> rootDirectoryTasks = new TreeSet<>(new RootDirectoryTaskComparator()); // Guarded by this
     private final List<FileIngestTask> directoryTasks = new ArrayList<>(); // Guarded by this
@@ -50,7 +47,6 @@ final class IngestScheduler {
     private final List<IngestTask> tasksInProgress = new ArrayList<>(); // Guarded by this
     private final DataSourceIngestTaskQueue dataSourceTaskDispenser = new DataSourceIngestTaskQueue();
     private final FileIngestTaskQueue fileTaskDispenser = new FileIngestTaskQueue();
-    private final AtomicLong nextIngestJobId = new AtomicLong(0L);
 
     static IngestScheduler getInstance() {
         return instance;
@@ -59,42 +55,7 @@ final class IngestScheduler {
     private IngestScheduler() {
     }
 
-    /**
-     * Creates an ingest job for a data source.
-     *
-     * @param rootDataSource The data source to ingest.
-     * @param ingestModuleTemplates The ingest module templates to use to create
-     * the ingest pipelines for the job.
-     * @param processUnallocatedSpace Whether or not the job should include
-     * processing of unallocated space.
-     * @return A collection of ingest module start up errors, empty on success.
-     * @throws InterruptedException
-     */
-    List<IngestModuleError> startIngestJob(Content dataSource, List<IngestModuleTemplate> ingestModuleTemplates, boolean processUnallocatedSpace) throws InterruptedException {
-        long jobId = nextIngestJobId.incrementAndGet();
-        IngestJob job = new IngestJob(jobId, dataSource, ingestModuleTemplates, processUnallocatedSpace);
-        ingestJobsById.put(jobId, job);
-        IngestManager.getInstance().fireIngestJobStarted(jobId);
-        List<IngestModuleError> errors = job.startUp();
-        if (errors.isEmpty()) {
-            addDataSourceToIngestJob(job, dataSource);
-        } else {
-            ingestJobsById.remove(jobId);
-            IngestManager.getInstance().fireIngestJobCancelled(jobId);
-        }
-        return errors;
-    }
-
-    boolean ingestJobsAreRunning() {
-        for (IngestJob job : ingestJobsById.values()) {
-            if (!job.isCancelled()) {
-                return true;
-            }
-        }
-        return false;
-    }
-
-    synchronized void addDataSourceToIngestJob(IngestJob job, Content dataSource) throws InterruptedException {
+    synchronized void scheduleTasksForIngestJob(IngestJob job, Content dataSource) throws InterruptedException {
         // Enqueue a data source ingest task for the data source.
         // If the thread executing this code is interrupted, it is because the
         // the number of ingest threads has been decreased while ingest jobs are
@@ -153,7 +114,7 @@ final class IngestScheduler {
         updateFileTaskQueues(null);
     }
 
-    void addFileToIngestJob(IngestJob job, AbstractFile file) {
+    void addFileTaskToIngestJob(IngestJob job, AbstractFile file) {
         FileIngestTask task = new FileIngestTask(job, file);
         if (shouldEnqueueFileTask(task)) {
             addTaskToFileQueue(task);
@@ -271,12 +232,6 @@ final class IngestScheduler {
         return true;
     }
 
-    void cancelAllIngestJobs() {
-        for (IngestJob job : ingestJobsById.values()) {
-            job.cancel();
-        }
-    }
-
     IngestTaskQueue getDataSourceIngestTaskQueue() {
         return dataSourceTaskDispenser;
     }
@@ -285,16 +240,7 @@ final class IngestScheduler {
         return fileTaskDispenser;
     }
 
-    void ingestTaskIsCompleted(IngestTask completedTask) {
-        if (ingestJobIsCompleted(completedTask)) {
-            IngestJob job = completedTask.getIngestJob();
-            job.shutDown();
-            ingestJobsById.remove(job.getId());
-            IngestManager.getInstance().fireIngestJobCompleted(job.getId());
-        }
-    }
-
-    private synchronized boolean ingestJobIsCompleted(IngestTask completedTask) {
+    synchronized boolean isLastTaskForIngestJob(IngestTask completedTask) {
         tasksInProgress.remove(completedTask);
         IngestJob job = completedTask.getIngestJob();
         long jobId = job.getId();

View File

@@ -36,7 +36,6 @@ import java.util.concurrent.atomic.AtomicInteger;
 import java.util.logging.Level;
 import org.sleuthkit.autopsy.coreutils.ImageUtils;
 import org.sleuthkit.autopsy.coreutils.Logger;
-import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
 import org.sleuthkit.autopsy.ingest.FileIngestModule;
 import org.sleuthkit.autopsy.ingest.IngestJobContext;
 import org.sleuthkit.autopsy.ingest.IngestServices;
@@ -56,7 +55,7 @@ import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
  * files. Ingests an image file and, if available, adds it's date, latitude,
  * longitude, altitude, device model, and device make to a blackboard artifact.
  */
-public final class ExifParserFileIngestModule extends IngestModuleAdapter implements FileIngestModule {
+public final class ExifParserFileIngestModule implements FileIngestModule {
 
     private static final Logger logger = Logger.getLogger(ExifParserFileIngestModule.class.getName());
     private final IngestServices services = IngestServices.getInstance();
@@ -198,7 +197,7 @@ public final class ExifParserFileIngestModule extends IngestModuleAdapter implements FileIngestModule {
     }
 
     @Override
-    public void shutDown(boolean ingestJobCancelled) {
+    public void shutDown() {
         // We only need to check for this final event on the last module per job
         if (refCounter.decrementAndGet(jobId) == 0) {
             if (filesToFire) {

View File

@@ -26,7 +26,6 @@ import java.util.List;
 import java.util.logging.Level;
 import org.openide.util.NbBundle;
 import org.sleuthkit.autopsy.coreutils.Logger;
-import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
 import org.sleuthkit.autopsy.ingest.FileIngestModule;
 import org.sleuthkit.autopsy.ingest.IngestJobContext;
 import org.sleuthkit.autopsy.ingest.IngestMessage;
@@ -44,7 +43,7 @@ import org.sleuthkit.datamodel.TskException;
 /**
  * Flags mismatched filename extensions based on file signature.
  */
-public class FileExtMismatchIngestModule extends IngestModuleAdapter implements FileIngestModule {
+public class FileExtMismatchIngestModule implements FileIngestModule {
 
     private static final Logger logger = Logger.getLogger(FileExtMismatchIngestModule.class.getName());
     private final IngestServices services = IngestServices.getInstance();
@@ -173,7 +172,7 @@ public class FileExtMismatchIngestModule extends IngestModuleAdapter implements FileIngestModule {
     }
 
     @Override
-    public void shutDown(boolean ingestJobCancelled) {
+    public void shutDown() {
         // We only need to post the summary msg from the last module per job
         if (refCounter.decrementAndGet(jobId) == 0) {
             IngestJobTotals jobTotals;

View File

@@ -34,14 +34,13 @@ import org.sleuthkit.datamodel.TskData;
 import org.sleuthkit.datamodel.TskData.FileKnown;
 import org.sleuthkit.datamodel.TskException;
 import org.sleuthkit.autopsy.ingest.IngestModule.ProcessResult;
-import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
 import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
 
 /**
  * Detects the type of a file based on signature (magic) values. Posts results
  * to the blackboard.
  */
-public class FileTypeIdIngestModule extends IngestModuleAdapter implements FileIngestModule {
+public class FileTypeIdIngestModule implements FileIngestModule {
 
     private static final Logger logger = Logger.getLogger(FileTypeIdIngestModule.class.getName());
     private static final long MIN_FILE_SIZE = 512;
@@ -129,7 +128,7 @@ public class FileTypeIdIngestModule extends IngestModuleAdapter implements FileIngestModule {
     }
 
     @Override
-    public void shutDown(boolean ingestJobCancelled) {
+    public void shutDown() {
         // We only need to post the summary msg from the last module per job
         if (refCounter.decrementAndGet(jobId) == 0) {
             IngestJobTotals jobTotals;

View File

@@ -46,7 +46,6 @@ import org.netbeans.api.progress.ProgressHandleFactory;
 import org.sleuthkit.autopsy.casemodule.services.FileManager;
 import org.sleuthkit.autopsy.ingest.FileIngestModule;
 import org.sleuthkit.autopsy.ingest.IngestMessage;
-import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
 import org.sleuthkit.autopsy.ingest.IngestMonitor;
 import org.sleuthkit.autopsy.ingest.ModuleContentEvent;
 import org.sleuthkit.datamodel.BlackboardArtifact;
@@ -66,7 +65,7 @@ import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
  * 7Zip ingest module extracts supported archives, adds extracted DerivedFiles,
  * reschedules extracted DerivedFiles for ingest.
  */
-public final class SevenZipIngestModule extends IngestModuleAdapter implements FileIngestModule {
+public final class SevenZipIngestModule implements FileIngestModule {
 
     private static final Logger logger = Logger.getLogger(SevenZipIngestModule.class.getName());
     private IngestServices services = IngestServices.getInstance();
@@ -186,7 +185,7 @@ public final class SevenZipIngestModule extends IngestModuleAdapter implements FileIngestModule {
     }
 
     @Override
-    public void shutDown(boolean ingestJobCancelled) {
+    public void shutDown() {
         // We don't need the value, but for cleanliness and consistency
         refCounter.decrementAndGet(jobId);
     }

View File

@@ -42,12 +42,11 @@ import org.sleuthkit.datamodel.TskCoreException;
 import org.sleuthkit.datamodel.TskData;
 import org.sleuthkit.datamodel.TskException;
 import org.sleuthkit.autopsy.hashdatabase.HashDbManager.HashDb;
-import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
 import org.sleuthkit.autopsy.ingest.FileIngestModule;
 import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
 import org.sleuthkit.datamodel.HashInfo;
 
-public class HashDbIngestModule extends IngestModuleAdapter implements FileIngestModule {
+public class HashDbIngestModule implements FileIngestModule {
 
     private static final Logger logger = Logger.getLogger(HashDbIngestModule.class.getName());
     private static final int MAX_COMMENT_SIZE = 500;
     private final IngestServices services = IngestServices.getInstance();
@@ -359,7 +358,7 @@ public class HashDbIngestModule extends IngestModuleAdapter implements FileIngestModule {
     }
 
     @Override
-    public void shutDown(boolean ingestJobCancelled) {
+    public void shutDown() {
         if (refCounter.decrementAndGet(jobId) == 0) {
             postSummary();
         }

View File

@@ -36,7 +36,6 @@ import org.sleuthkit.autopsy.ingest.FileIngestModule;
 import org.sleuthkit.autopsy.ingest.IngestServices;
 import org.sleuthkit.autopsy.ingest.IngestMessage;
 import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType;
-import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
 import org.sleuthkit.autopsy.ingest.IngestJobContext;
 import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
 import org.sleuthkit.autopsy.keywordsearch.Ingester.IngesterException;
@@ -55,7 +54,7 @@ import org.sleuthkit.datamodel.TskData.FileKnown;
  * on currently configured lists for ingest and writes results to blackboard
  * Reports interesting events to Inbox and to viewers
  */
-public final class KeywordSearchIngestModule extends IngestModuleAdapter implements FileIngestModule {
+public final class KeywordSearchIngestModule implements FileIngestModule {
 
     enum UpdateFrequency {
@@ -93,6 +92,7 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter implements FileIngestModule {
     private static AtomicInteger instanceCount = new AtomicInteger(0); //just used for logging
     private int instanceNum = 0;
     private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
+    private IngestJobContext context;
 
     private enum IngestStatus {
@@ -136,6 +136,7 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter implements FileIngestModule {
         caseHandle = Case.getCurrentCase().getSleuthkitCase();
         tikaFormatDetector = new Tika();
         ingester = Server.getIngester();
+        this.context = context;
 
         // increment the module reference count
         // if first instance of this module for this job then check the server and existence of keywords
@@ -248,14 +249,14 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter implements FileIngestModule {
      * Cleanup resources, threads, timers
      */
     @Override
-    public void shutDown(boolean ingestJobCancelled) {
+    public void shutDown() {
         logger.log(Level.INFO, "Instance {0}", instanceNum); //NON-NLS
 
         if (initialized == false) {
             return;
         }
 
-        if (ingestJobCancelled) {
+        if (context.isJobCancelled()) {
             logger.log(Level.INFO, "Ingest job cancelled"); //NON-NLS
             stop();
             return;

View File

@@ -36,13 +36,12 @@ import org.sleuthkit.autopsy.ingest.IngestMessage;
 import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType;
 import org.sleuthkit.datamodel.Content;
 import org.sleuthkit.autopsy.ingest.IngestModule.ProcessResult;
-import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
 import org.sleuthkit.autopsy.ingest.IngestJobContext;
 /**
  * Recent activity image ingest module
  */
-public final class RAImageIngestModule extends IngestModuleAdapter implements DataSourceIngestModule {
+public final class RAImageIngestModule implements DataSourceIngestModule {
     private static final Logger logger = Logger.getLogger(RAImageIngestModule.class.getName());
     private final List<Extract> extracters = new ArrayList<>();
@@ -160,14 +159,9 @@ public final class RAImageIngestModule extends IngestModuleAdapter implements DataSourceIngestModule {
                 historyMsg.toString());
         services.postMessage(inboxMsg);
-        return ProcessResult.OK;
-    }
-    @Override
-    public void shutDown(boolean ingestJobCancelled) {
-        if (ingestJobCancelled) {
+        if (context.isJobCancelled()) {
             stop();
-            return;
+            return ProcessResult.OK;
         }
         for (int i = 0; i < extracters.size(); i++) {
@@ -180,6 +174,8 @@ public final class RAImageIngestModule extends IngestModuleAdapter implements DataSourceIngestModule {
                     extracter.getName()));
             }
         }
+        return ProcessResult.OK;
     }
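The RAImageIngestModule hunks above show the behavioral half of this commit: with no cancellation flag delivered to shutDown(), a data source module polls context.isJobCancelled() between units of work inside process() and cleans up on the spot. A sketch of that shape follows, assuming the work splits into a known number of units (the class name and unit count are hypothetical).

package org.sleuthkit.autopsy.examples;

import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestModule;
import org.sleuthkit.datamodel.Content;

class CancellationAwareDataSourceModule implements DataSourceIngestModule {

    private static final int WORK_UNITS = 10; // hypothetical subtask count
    private IngestJobContext context = null;

    @Override
    public void startUp(IngestJobContext context) throws IngestModuleException {
        this.context = context;
    }

    @Override
    public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
        progressBar.switchToDeterminate(WORK_UNITS);
        for (int i = 0; i < WORK_UNITS; ++i) {
            // Poll between subtasks and bail out early; cancellation is not
            // an error, so OK is the right result.
            if (context.isJobCancelled()) {
                return IngestModule.ProcessResult.OK;
            }
            // ... one unit of work against dataSource would go here ...
            progressBar.progress(i + 1);
        }
        return IngestModule.ProcessResult.OK;
    }

    @Override
    public void shutDown() {
        // Nothing to release in this sketch, but the override is required
        // now that IngestModuleAdapter's empty default is gone.
    }
}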
private void stop() { private void stop() {

View File

@@ -29,7 +29,6 @@ import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.autopsy.coreutils.PlatformUtil;
 import org.sleuthkit.autopsy.ingest.FileIngestModule;
-import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
 import org.sleuthkit.autopsy.ingest.IngestJobContext;
 import org.sleuthkit.autopsy.ingest.IngestServices;
 import org.sleuthkit.autopsy.ingest.ModuleContentEvent;
@@ -50,7 +49,7 @@ import org.sleuthkit.datamodel.Volume;
 /**
  * Scalpel carving ingest module
  */
-class ScalpelCarverIngestModule extends IngestModuleAdapter implements FileIngestModule {
+class ScalpelCarverIngestModule implements FileIngestModule {
     private static final Logger logger = Logger.getLogger(ScalpelCarverIngestModule.class.getName());
     private final String MODULE_OUTPUT_DIR_NAME = "ScalpelCarver"; //NON-NLS
@@ -228,4 +227,8 @@ class ScalpelCarverIngestModule extends IngestModuleAdapter implements FileIngestModule {
         return ProcessResult.OK;
     }
+    @Override
+    public void shutDown() {
+    }
 }
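One side effect of dropping the adapter is visible above and again in ThunderbirdMboxFileIngestModule below: modules with no per-job state still have to declare shutDown() themselves, since no base class supplies a no-op default. The added override is deliberately empty; a comment stating that intent (not part of this commit) would make it clearer:

@Override
public void shutDown() {
    // Intentionally empty: this module keeps no per-job state to release.
}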

View File

@@ -23,7 +23,6 @@ import java.security.NoSuchAlgorithmException;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import javax.xml.bind.DatatypeConverter;
-import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
 import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
 import org.sleuthkit.autopsy.ingest.IngestMessage;
 import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType;
@@ -41,7 +40,7 @@ import org.openide.util.NbBundle;
  * Format (EWF) E01 image file by generating a hash of the file and comparing it
  * to the value stored in the image.
  */
-public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSourceIngestModule {
+public class EwfVerifyIngestModule implements DataSourceIngestModule {
     private static final Logger logger = Logger.getLogger(EwfVerifyIngestModule.class.getName());
     private static final long DEFAULT_CHUNK_SIZE = 32 * 1024;
@@ -62,7 +61,6 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSourceIngestModule {
     public void startUp(IngestJobContext context) throws IngestModuleException {
         this.context = context;
         verified = false;
-        skipped = false;
         img = null;
         imgName = "";
         storedHash = "";
@@ -104,7 +102,6 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSourceIngestModule {
                 NbBundle.getMessage(this.getClass(),
                         "EwfVerifyIngestModule.process.skipNonEwf",
                         imgName)));
-            skipped = true;
             return ProcessResult.OK;
         }
@@ -169,26 +166,22 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSourceIngestModule {
         calculatedHash = DatatypeConverter.printHexBinary(messageDigest.digest()).toLowerCase();
         verified = calculatedHash.equals(storedHash);
         logger.log(Level.INFO, "Hash calculated from {0}: {1}", new Object[]{imgName, calculatedHash}); //NON-NLS
+        logger.log(Level.INFO, "complete() {0}", EwfVerifierModuleFactory.getModuleName()); //NON-NLS
+        String msg;
+        if (verified) {
+            msg = NbBundle.getMessage(this.getClass(), "EwfVerifyIngestModule.shutDown.verified");
+        } else {
+            msg = NbBundle.getMessage(this.getClass(), "EwfVerifyIngestModule.shutDown.notVerified");
+        }
+        String extra = NbBundle
+                .getMessage(this.getClass(), "EwfVerifyIngestModule.shutDown.verifyResultsHeader", imgName);
+        extra += NbBundle.getMessage(this.getClass(), "EwfVerifyIngestModule.shutDown.resultLi", msg);
+        extra += NbBundle.getMessage(this.getClass(), "EwfVerifyIngestModule.shutDown.calcHashLi", calculatedHash);
+        extra += NbBundle.getMessage(this.getClass(), "EwfVerifyIngestModule.shutDown.storedHashLi", storedHash);
+        services.postMessage(IngestMessage.createMessage(MessageType.INFO, EwfVerifierModuleFactory.getModuleName(), imgName + msg, extra));
+        logger.log(Level.INFO, "{0}{1}", new Object[]{imgName, msg});
         return ProcessResult.OK;
     }
-    @Override
-    public void shutDown(boolean ingestJobCancelled) {
-        logger.log(Level.INFO, "complete() {0}", EwfVerifierModuleFactory.getModuleName()); //NON-NLS
-        if (skipped == false) {
-            String msg = "";
-            if (verified) {
-                msg = NbBundle.getMessage(this.getClass(), "EwfVerifyIngestModule.shutDown.verified");
-            } else {
-                msg = NbBundle.getMessage(this.getClass(), "EwfVerifyIngestModule.shutDown.notVerified");
-            }
-            String extra = NbBundle
-                    .getMessage(this.getClass(), "EwfVerifyIngestModule.shutDown.verifyResultsHeader", imgName);
-            extra += NbBundle.getMessage(this.getClass(), "EwfVerifyIngestModule.shutDown.resultLi", msg);
-            extra += NbBundle.getMessage(this.getClass(), "EwfVerifyIngestModule.shutDown.calcHashLi", calculatedHash);
-            extra += NbBundle.getMessage(this.getClass(), "EwfVerifyIngestModule.shutDown.storedHashLi", storedHash);
-            services.postMessage(IngestMessage.createMessage(MessageType.INFO, EwfVerifierModuleFactory.getModuleName(), imgName + msg, extra));
-            logger.log(Level.INFO, "{0}{1}", new Object[]{imgName, msg});
-        }
-    }
 }

View File

@@ -31,7 +31,6 @@ import org.sleuthkit.autopsy.datamodel.ContentUtils;
 import org.sleuthkit.autopsy.ingest.FileIngestModule;
 import org.sleuthkit.autopsy.ingest.IngestMessage;
 import org.sleuthkit.autopsy.ingest.IngestModule.ProcessResult;
-import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
 import org.sleuthkit.autopsy.ingest.IngestJobContext;
 import org.sleuthkit.autopsy.ingest.IngestServices;
 import org.sleuthkit.autopsy.ingest.ModuleContentEvent;
@@ -50,7 +49,7 @@ import org.sleuthkit.datamodel.TskException;
  * Understands Thunderbird folder layout to provide additional structure and
  * metadata.
  */
-public final class ThunderbirdMboxFileIngestModule extends IngestModuleAdapter implements FileIngestModule {
+public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
     private static final Logger logger = Logger.getLogger(ThunderbirdMboxFileIngestModule.class.getName());
     private IngestServices services = IngestServices.getInstance();
@@ -403,4 +402,8 @@ public final class ThunderbirdMboxFileIngestModule extends IngestModuleAdapter implements FileIngestModule {
     IngestServices getServices() {
         return services;
     }
+    @Override
+    public void shutDown() {
+    }
 }