Merge remote-tracking branch 'upstream/develop' into develop

Author: Karl Mortensen
Date:   2014-11-07 08:28:42 -05:00
Commit: 5f13af169f
20 changed files with 446 additions and 353 deletions

View File: ExecUtil.java

@@ -150,6 +150,9 @@ public final class ExecUtil {
         }
     }

+    /**
+     * EVERYTHING FOLLOWING THIS LINE IS DEPRECATED AND SLATED FOR REMOVAL
+     */
     private static final Logger logger = Logger.getLogger(ExecUtil.class.getName());
     private Process proc = null;
     private ExecUtil.StreamToStringRedirect errorStringRedirect = null;
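
The hunk above marks the old instance-based ExecUtil API as deprecated. Its replacement, used throughout the rest of this commit, is the static ExecUtil.execute(ProcessBuilder, ProcessTerminator) overload. A minimal sketch of the replacement pattern, assuming a data source module that keeps its IngestJobContext in a field named context; the tool name, arguments, and helper method are placeholders, not part of this commit:

    // Sketch only: "some_tool.exe" and its arguments are hypothetical.
    private void runExternalTool(String inputPath, String outputPath) throws IOException {
        List<String> commandLine = new ArrayList<>();
        commandLine.add("some_tool.exe");
        commandLine.add(inputPath);
        commandLine.add(outputPath);
        ProcessBuilder processBuilder = new ProcessBuilder(commandLine);
        // The terminator kills the child process when data source ingest is
        // cancelled, replacing the old InterruptedException-based shutdown.
        ExecUtil.execute(processBuilder, new DataSourceIngestModuleProcessTerminator(context));
    }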

View File: SampleExecutableDataSourceIngestModule.java

@@ -52,6 +52,7 @@ import org.sleuthkit.autopsy.externalresults.ExternalResults;
 import org.sleuthkit.autopsy.externalresults.ExternalResultsImporter;
 import org.sleuthkit.autopsy.externalresults.ExternalResultsXMLParser;
 import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
+import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProcessTerminator;
 import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
 import org.sleuthkit.autopsy.ingest.IngestJobContext;
 import org.sleuthkit.autopsy.ingest.IngestMessage;
@@ -74,14 +75,14 @@ public class SampleExecutableDataSourceIngestModule implements DataSourceIngestModule
     private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
     private static final String moduleName = SampleExecutableIngestModuleFactory.getModuleName();
     private final String fileInCaseDatabase = "/WINDOWS/system32/ntmsapi.dll"; // Probably
-    private long jobId;
+    private IngestJobContext context;
     private String outputDirPath;
     private String derivedFileInCaseDatabase;

     @Override
     public void startUp(IngestJobContext context) throws IngestModuleException {
-        jobId = context.getJobId();
-        if (refCounter.incrementAndGet(jobId) == 1) {
+        this.context = context;
+        if (refCounter.incrementAndGet(context.getJobId()) == 1) {
             // Create an output directory for this job.
             outputDirPath = Case.getCurrentCase().getModulesOutputDirAbsPath() + File.separator + moduleName; //NON-NLS
             File outputDir = new File(outputDirPath);
@@ -93,7 +94,7 @@ public class SampleExecutableDataSourceIngestModule implements DataSourceIngestModule
     @Override
     public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
-        if (refCounter.get(jobId) == 1) {
+        if (refCounter.get(context.getJobId()) == 1) {
             try {
                 // There will be two tasks: data source analysis and import of
                 // the results of the analysis.
@@ -108,14 +109,18 @@ public class SampleExecutableDataSourceIngestModule implements DataSourceIngestModule
                 // derived files, and reports generated by the analysis. In this
                 // sample ingest module, the generation of the analysis results is
                 // simulated.
-                String resultsFilePath = outputDirPath + File.separator + String.format("job_%d_results.xml", jobId);
+                String resultsFilePath = outputDirPath + File.separator + String.format("job_%d_results.xml", context.getJobId());
                 boolean haveRealExecutable = false;
                 if (haveRealExecutable) {
                     if (dataSource instanceof Image) {
                         Image image = (Image)dataSource;
                         String dataSourcePath = image.getPaths()[0];
-                        ExecUtil executor = new ExecUtil();
-                        executor.execute("some.exe", dataSourcePath, resultsFilePath);
+                        List<String> commandLine = new ArrayList<>();
+                        commandLine.add("some.exe");
+                        commandLine.add(dataSourcePath);
+                        commandLine.add(resultsFilePath);
+                        ProcessBuilder processBuilder = new ProcessBuilder(commandLine);
+                        ExecUtil.execute(processBuilder, new DataSourceIngestModuleProcessTerminator(context));
                     }
                     // not a disk image
                     else {
@@ -136,7 +141,7 @@ public class SampleExecutableDataSourceIngestModule implements DataSourceIngestModule
                     IngestServices.getInstance().postMessage(IngestMessage.createErrorMessage(moduleName, "External Results Import Error", errorInfo.getMessage()));
                 }
                 progressBar.progress(2);
-            } catch (InterruptedException | ParserConfigurationException | TransformerException | IOException ex) {
+            } catch (ParserConfigurationException | TransformerException | IOException ex) {
                 Logger logger = IngestServices.getInstance().getLogger(moduleName);
                 logger.log(Level.SEVERE, "Failed to simulate analysis and results import", ex); //NON-NLS
                 return ProcessResult.ERROR;
@@ -155,7 +160,7 @@ public class SampleExecutableDataSourceIngestModule implements DataSourceIngestModule
         List<String> filePaths = new ArrayList<>();
         String fileContents = "This is a simulated derived file.";
         for (int i = 0; i < 2; ++i) {
-            String fileName = String.format("job_%d_derived_file_%d.txt", jobId, i);
+            String fileName = String.format("job_%d_derived_file_%d.txt", context.getJobId(), i);
             filePaths.add(generateFile(fileName, fileContents.getBytes()));
             if (i == 0) {
                 this.derivedFileInCaseDatabase = this.fileInCaseDatabase + "/" + fileName;
@@ -168,7 +173,7 @@ public class SampleExecutableDataSourceIngestModule implements DataSourceIngestModule
         List<String> filePaths = new ArrayList<>();
         String fileContents = "This is a simulated report.";
         for (int i = 0; i < 2; ++i) {
-            String fileName = String.format("job_%d_report_%d.txt", jobId, i);
+            String fileName = String.format("job_%d_report_%d.txt", context.getJobId(), i);
             filePaths.add(generateFile(fileName, fileContents.getBytes()));
         }
         return filePaths;

View File: DataSourceIngestModuleProgress.java

@@ -71,14 +71,14 @@ public class DataSourceIngestModuleProgress {
     }

     /**
-     * Updates the progress bar with the number of work units performed, if in
-     * the determinate mode.
+     * Updates the progress bar with a new task name and the number of work
+     * units performed, if in the determinate mode.
      *
-     * @param message   Message to display in sub-title
+     * @param currentTask The task name.
      * @param workUnits Number of work units performed so far by the module.
      */
-    public void progress(String message, int workUnits) {
-        this.job.advanceDataSourceIngestProgressBar(message, workUnits);
+    public void progress(String currentTask, int workUnits) {
+        this.job.advanceDataSourceIngestProgressBar(currentTask, workUnits);
     }
 }
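
The parameter rename reflects that the two-argument overload now sets a task name, shown as the progress bar subtitle, along with the work unit count. A usage sketch from inside a data source module's process() method, assuming the two-step flow of the sample module above and that the progress object also offers a switchToDeterminate(int) method:

    // Sketch only; the step names are illustrative.
    progressBar.switchToDeterminate(2);               // two work units in total
    progressBar.progress("Analyzing data source", 0); // subtitle + units done
    // ... run the analysis ...
    progressBar.progress("Importing results", 1);
    // ... import the results ...
    progressBar.progress(2);                          // all units done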

View File: DataSourceIngestPipeline.java

@@ -71,6 +71,7 @@ final class DataSourceIngestPipeline {
                         "IngestJob.progress.dataSourceIngest.displayName",
                         module.getDisplayName(), dataSource.getName());
                 this.job.updateDataSourceIngestProgressBarDisplayName(displayName);
+                this.job.switchDataSourceIngestProgressBarToIndeterminate();
                 ingestManager.setIngestTaskProgress(task, module.getDisplayName());
                 module.process(dataSource, new DataSourceIngestModuleProgress(this.job));
             } catch (Exception ex) { // Catch-all exception firewall

View File: IngestJob.java

@@ -42,22 +42,12 @@ import org.sleuthkit.datamodel.Content;
  */
 final class IngestJob {

-    /**
-     * An ingest job may have multiple stages.
-     */
-    private enum Stages {
-
-        /**
-         * High priority data source ingest modules and file ingest modules.
-         */
-        FIRST,
-        /**
-         * Lower priority, usually long-running, data source ingest modules.
-         */
-        SECOND
-    };
-
     private static final Logger logger = Logger.getLogger(IngestJob.class.getName());

+    /**
+     * The task scheduler singleton is responsible for creating and scheduling
+     * the ingest tasks that make up ingest jobs.
+     */
     private static final IngestTasksScheduler taskScheduler = IngestTasksScheduler.getInstance();

     /**
@@ -69,34 +59,66 @@ final class IngestJob {
     private static final ConcurrentHashMap<Long, IngestJob> jobsById = new ConcurrentHashMap<>();

     /**
-     * These fields define the ingest job and the work it entails.
+     * These fields define the ingest job, including its ingest pipelines. Note
+     * that there is a collection of multiple copies of the file ingest
+     * pipeline, one for each file ingest thread.
      */
     private final long id;
     private final Content dataSource;
     private final boolean processUnallocatedSpace;
-    private Stages stage;
-    private DataSourceIngestPipeline dataSourceIngestPipeline;
+    private final Object dataSourceIngestPipelineLock;
     private DataSourceIngestPipeline firstStageDataSourceIngestPipeline;
     private DataSourceIngestPipeline secondStageDataSourceIngestPipeline;
+    private DataSourceIngestPipeline currentDataSourceIngestPipeline;
     private final LinkedBlockingQueue<FileIngestPipeline> fileIngestPipelines;

     /**
-     * These fields are used to update ingest progress UI components for the
-     * job. The filesInProgress collection contains the names of the files that
-     * are in the file ingest pipelines and the two file counter fields are used
-     * to update the file ingest progress bar.
+     * An ingest runs in stages.
      */
+    private static enum Stages {
+
+        /**
+         * Setting up for processing.
+         */
+        INITIALIZATION,
+        /**
+         * Running high priority data source level ingest modules and file level
+         * ingest modules.
+         */
+        FIRST,
+        /**
+         * Running lower priority, usually long-running, data source level
+         * ingest modules.
+         */
+        SECOND,
+        /**
+         * Cleaning up.
+         */
+        FINALIZATION
+    };
+    private Stages stage;
+    private final Object stageCompletionCheckLock;
+
+    /**
+     * These fields are used to provide data source level task progress bars for
+     * the job.
+     */
-    private ProgressHandle dataSourceIngestProgress;
     private final Object dataSourceIngestProgressLock;
+    private ProgressHandle dataSourceIngestProgress;

+    /**
+     * These fields are used to provide file level ingest task progress bars for
+     * the job.
+     */
+    private final Object fileIngestProgressLock;
     private final List<String> filesInProgress;
     private long estimatedFilesToProcess;
     private long processedFiles;
     private ProgressHandle fileIngestProgress;
-    private final Object fileIngestProgressLock;

     /**
      * These fields support cancellation of either the currently running data
-     * source ingest module or the entire ingest job.
+     * source level ingest module or the entire ingest job.
      */
     private volatile boolean currentDataSourceIngestModuleCancelled;
     private volatile boolean cancelled;
@@ -159,7 +181,7 @@ final class IngestJob {
     static List<IngestJobSnapshot> getJobSnapshots() {
         List<IngestJobSnapshot> snapShots = new ArrayList<>();
         for (IngestJob job : IngestJob.jobsById.values()) {
-            snapShots.add(job.getIngestJobSnapshot());
+            snapShots.add(job.getSnapshot());
         }
         return snapShots;
     }
@@ -185,11 +207,13 @@ final class IngestJob {
         this.id = id;
         this.dataSource = dataSource;
         this.processUnallocatedSpace = processUnallocatedSpace;
-        this.stage = IngestJob.Stages.FIRST;
+        this.dataSourceIngestPipelineLock = new Object();
         this.fileIngestPipelines = new LinkedBlockingQueue<>();
         this.filesInProgress = new ArrayList<>();
         this.dataSourceIngestProgressLock = new Object();
         this.fileIngestProgressLock = new Object();
+        this.stage = IngestJob.Stages.INITIALIZATION;
+        this.stageCompletionCheckLock = new Object();
         this.startTime = new Date().getTime();
     }
@@ -205,15 +229,15 @@ final class IngestJob {
     /**
      * Gets the data source to be ingested by this job.
      *
-     * @return A reference to a Content object representing the data source.
+     * @return A Content object representing the data source.
      */
     Content getDataSource() {
         return this.dataSource;
     }

     /**
-     * Queries whether or not unallocated space should be processed as part of
-     * this job.
+     * Gets whether or not unallocated space should be processed as part of this
+     * job.
      *
      * @return True or false.
      */
@@ -222,22 +246,30 @@ final class IngestJob {
     }

     /**
-     * Passes the data source for this job through a data source ingest
-     * pipeline.
+     * Passes the data source for this job through the currently active data
+     * source level ingest pipeline.
      *
      * @param task A data source ingest task wrapping the data source.
      */
     void process(DataSourceIngestTask task) {
         try {
-            if (!this.isCancelled() && !this.dataSourceIngestPipeline.isEmpty()) {
-                List<IngestModuleError> errors = new ArrayList<>();
-                errors.addAll(this.dataSourceIngestPipeline.process(task));
-                if (!errors.isEmpty()) {
-                    logIngestModuleErrors(errors);
+            synchronized (this.dataSourceIngestPipelineLock) {
+                if (!this.isCancelled() && !this.currentDataSourceIngestPipeline.isEmpty()) {
+                    /**
+                     * Run the data source through the pipeline.
+                     */
+                    List<IngestModuleError> errors = new ArrayList<>();
+                    errors.addAll(this.currentDataSourceIngestPipeline.process(task));
+                    if (!errors.isEmpty()) {
+                        logIngestModuleErrors(errors);
+                    }
                 }
             }

-            // Shut down the data source ingest progress bar right away.
+            /**
+             * Shut down the data source ingest progress bar right away. Data
+             * source-level processing is finished for this stage.
+             */
             synchronized (this.dataSourceIngestProgressLock) {
                 if (null != this.dataSourceIngestProgress) {
                     this.dataSourceIngestProgress.finish();
@@ -245,21 +277,22 @@ final class IngestJob {
                 }
             }
         } finally {
-            // No matter what happens, let the task scheduler know that this
-            // task is completed and check for job completion.
+            /**
+             * No matter what happens, do ingest task bookkeeping.
+             */
             IngestJob.taskScheduler.notifyTaskCompleted(task);
-            if (IngestJob.taskScheduler.tasksForJobAreCompleted(this)) {
-                this.handleTasksCompleted();
-            }
+            this.checkForStageCompleted();
         }
     }

     /**
-     * Passes the a file from the data source for this job through the file
-     * ingest pipeline.
+     * Passes a file from the data source for this job through the file level
+     * ingest pipeline.
      *
      * @param task A file ingest task.
-     * @throws InterruptedException
+     * @throws InterruptedException if the thread executing this code is
+     * interrupted while blocked on taking from or putting to the file ingest
+     * pipelines collection.
      */
     void process(FileIngestTask task) throws InterruptedException {
         try {
@@ -275,7 +308,9 @@ final class IngestJob {
                  */
                 AbstractFile file = task.getFile();

-                // Update the file ingest progress bar.
+                /**
+                 * Update the file ingest progress bar.
+                 */
                 synchronized (this.fileIngestProgressLock) {
                     ++this.processedFiles;
                     if (this.processedFiles <= this.estimatedFilesToProcess) {
@@ -286,15 +321,19 @@ final class IngestJob {
                     this.filesInProgress.add(file.getName());
                 }

-                // Run the file through the pipeline.
+                /**
+                 * Run the file through the pipeline.
+                 */
                 List<IngestModuleError> errors = new ArrayList<>();
                 errors.addAll(pipeline.process(task));
                 if (!errors.isEmpty()) {
                     logIngestModuleErrors(errors);
                 }

-                // Update the file ingest progress bar again in case the
-                // file was being displayed.
+                /**
+                 * Update the file ingest progress bar again, in case the
+                 * file was being displayed.
+                 */
                 if (!this.cancelled) {
                     synchronized (this.fileIngestProgressLock) {
                         this.filesInProgress.remove(file.getName());
@@ -307,27 +346,33 @@ final class IngestJob {
                     }
                 }

-                // Relinquish the pipeline so it can be reused by another file
-                // ingest thread.
+                /**
+                 * Relinquish the pipeline so it can be reused by another file
+                 * ingest thread.
+                 */
                 this.fileIngestPipelines.put(pipeline);
             }
         } finally {
-            // No matter what happens, let the task scheduler know that this
-            // task is completed and check for job completion.
+            /**
+             * No matter what happens, do ingest task bookkeeping.
+             */
             IngestJob.taskScheduler.notifyTaskCompleted(task);
-            if (IngestJob.taskScheduler.tasksForJobAreCompleted(this)) {
-                this.handleTasksCompleted();
-            }
+            this.checkForStageCompleted();
         }
     }

     /**
-     * Adds more files to an ingest job, i.e., derived or carved files. Not
+     * Adds more files to an ingest job, i.e., extracted or carved files. Not
      * currently supported for the second stage of the job.
      *
      * @param files A list of files to add.
      */
     void addFiles(List<AbstractFile> files) {
+        /**
+         * Note: This implementation assumes that this is being called by an
+         * ingest module running code on an ingest thread that is holding a
+         * reference to an ingest task, so no task completion check is done.
+         */
         if (IngestJob.Stages.FIRST == this.stage) {
             for (AbstractFile file : files) {
                 IngestJob.taskScheduler.scheduleFileIngestTask(this, file);
@@ -335,10 +380,18 @@ final class IngestJob {
         } else {
             IngestJob.logger.log(Level.SEVERE, "Adding files during second stage not supported"); //NON-NLS
         }
+
+        /**
+         * The intended clients of this method are ingest modules running code
+         * on an ingest thread that is holding a reference to an ingest task, in
+         * which case a task completion check would not be necessary. This is a
+         * bit of defensive programming.
+         */
+        this.checkForStageCompleted();
     }

     /**
-     * Updates the display name of the data source ingest progress bar.
+     * Updates the display name of the data source level ingest progress bar.
      *
      * @param displayName The new display name.
      */
@@ -351,8 +404,9 @@ final class IngestJob {
     }

     /**
-     * Switches the data source progress bar to determinate mode. This should be
-     * called if the total work units to process the data source is known.
+     * Switches the data source level ingest progress bar to determinate mode.
+     * This should be called if the total work units to process the data source
+     * is known.
      *
      * @param workUnits Total number of work units for the processing of the
      * data source.
@@ -368,9 +422,9 @@ final class IngestJob {
     }

     /**
-     * Switches the data source ingest progress bar to indeterminate mode. This
-     * should be called if the total work units to process the data source is
-     * unknown.
+     * Switches the data source level ingest progress bar to indeterminate mode.
+     * This should be called if the total work units to process the data source
+     * is unknown.
      */
     void switchDataSourceIngestProgressBarToIndeterminate() {
         if (!this.cancelled) {
@@ -383,8 +437,8 @@ final class IngestJob {
     }

     /**
-     * Updates the data source ingest progress bar with the number of work units
-     * performed, if in the determinate mode.
+     * Updates the data source level ingest progress bar with the number of work
+     * units performed, if in the determinate mode.
      *
      * @param workUnits Number of work units performed.
      */
@@ -399,39 +453,41 @@ final class IngestJob {
     }

     /**
-     * Updates the data source ingest progress bar display name.
+     * Updates the data source level ingest progress with a new task name, where
+     * the task name is the "subtitle" under the display name.
      *
-     * @param displayName The new display name.
+     * @param currentTask The task name.
      */
-    void advanceDataSourceIngestProgressBar(String displayName) {
+    void advanceDataSourceIngestProgressBar(String currentTask) {
         if (!this.cancelled) {
             synchronized (this.dataSourceIngestProgressLock) {
                 if (null != this.dataSourceIngestProgress) {
-                    this.dataSourceIngestProgress.progress(displayName);
+                    this.dataSourceIngestProgress.progress(currentTask);
                 }
             }
         }
     }

     /**
-     * Updates the progress bar with the number of work units performed, if in
-     * the determinate mode.
+     * Updates the data source level ingest progress bar with a new task name
+     * and the number of work units performed, if in the determinate mode. The
+     * task name is the "subtitle" under the display name.
      *
-     * @param message Message to display in sub-title
+     * @param currentTask The task name.
      * @param workUnits Number of work units performed.
      */
-    void advanceDataSourceIngestProgressBar(String message, int workUnits) {
+    void advanceDataSourceIngestProgressBar(String currentTask, int workUnits) {
         if (!this.cancelled) {
             synchronized (this.fileIngestProgressLock) {
-                this.dataSourceIngestProgress.progress(message, workUnits);
+                this.dataSourceIngestProgress.progress(currentTask, workUnits);
             }
         }
     }

     /**
-     * Determines whether or not a temporary cancellation of data source ingest
-     * in order to stop the currently executing data source ingest module is in
-     * effect.
+     * Queries whether or not a temporary cancellation of data source level
+     * ingest in order to stop the currently executing data source level ingest
+     * module is in effect.
      *
      * @return True or false.
      */
@@ -440,17 +496,19 @@ final class IngestJob {
     }

     /**
-     * Rescind a temporary cancellation of data source ingest in order to stop
-     * the currently executing data source ingest module.
+     * Rescind a temporary cancellation of data source level ingest that was
+     * used to stop a single data source level ingest module.
      */
     void currentDataSourceIngestModuleCancellationCompleted() {
         this.currentDataSourceIngestModuleCancelled = false;

-        // A new progress bar must be created because the cancel button of the
-        // previously constructed component is disabled by NetBeans when the
-        // user selects the "OK" button of the cancellation confirmation dialog
-        // popped up by NetBeans when the progress bar cancel button was
-        // pressed.
+        /**
+         * A new progress bar must be created because the cancel button of the
+         * previously constructed component is disabled by NetBeans when the
+         * user selects the "OK" button of the cancellation confirmation dialog
+         * popped up by NetBeans when the progress bar cancel button was
+         * pressed.
+         */
         synchronized (this.dataSourceIngestProgressLock) {
             this.dataSourceIngestProgress.finish();
             this.dataSourceIngestProgress = null;
@@ -459,12 +517,14 @@ final class IngestJob {
     }

     /**
-     * Requests cancellation of ingest, i.e., a shutdown of the data source and
-     * file ingest pipelines.
+     * Requests cancellation of ingest, i.e., a shutdown of the data source
+     * level and file level ingest pipelines.
      */
     void cancel() {
-        // Put a cancellation message on data source ingest progress bar,
-        // if it is still running.
+        /**
+         * Put a cancellation message on data source level ingest progress bar,
+         * if it is still running.
+         */
         synchronized (this.dataSourceIngestProgressLock) {
             if (dataSourceIngestProgress != null) {
                 final String displayName = NbBundle.getMessage(this.getClass(),
@@ -477,8 +537,10 @@ final class IngestJob {
             }
         }

-        // Put a cancellation message on the file ingest progress bar,
-        // if it is still running.
+        /**
+         * Put a cancellation message on the file level ingest progress bar, if
+         * it is still running.
+         */
         synchronized (this.fileIngestProgressLock) {
             if (this.fileIngestProgress != null) {
                 final String displayName = NbBundle.getMessage(this.getClass(),
@@ -493,14 +555,16 @@ final class IngestJob {
         this.cancelled = true;

         /**
-         * Tell the task scheduler to cancel all pending tasks.
+         * Tell the task scheduler to cancel all pending tasks, i.e., tasks not
+         * being performed by an ingest thread.
         */
         IngestJob.taskScheduler.cancelPendingTasksForIngestJob(this);
+        this.checkForStageCompleted();
     }

     /**
      * Queries whether or not cancellation of ingest i.e., a shutdown of the
-     * data source and file ingest pipelines, has been requested
+     * data source level and file level ingest pipelines, has been requested.
      *
      * @return True or false.
      */
@@ -508,85 +572,6 @@ final class IngestJob {
         return this.cancelled;
     }

-    /**
-     * Creates the file and data source ingest pipelines.
-     *
-     * @param ingestModuleTemplates Ingest module templates to use to populate
-     * the pipelines.
-     */
-    private void createIngestPipelines(List<IngestModuleTemplate> ingestModuleTemplates) {
-        // Make mappings of ingest module factory class names to templates.
-        Map<String, IngestModuleTemplate> dataSourceModuleTemplates = new HashMap<>();
-        Map<String, IngestModuleTemplate> fileModuleTemplates = new HashMap<>();
-        for (IngestModuleTemplate template : ingestModuleTemplates) {
-            if (template.isDataSourceIngestModuleTemplate()) {
-                dataSourceModuleTemplates.put(template.getModuleFactory().getClass().getCanonicalName(), template);
-            }
-            if (template.isFileIngestModuleTemplate()) {
-                fileModuleTemplates.put(template.getModuleFactory().getClass().getCanonicalName(), template);
-            }
-        }
-
-        // Use the mappings and the ingest pipelines configuration to create
-        // ordered lists of ingest module templates for each ingest pipeline.
-        IngestPipelinesConfiguration pipelineConfigs = IngestPipelinesConfiguration.getInstance();
-        List<IngestModuleTemplate> firstStageDataSourceModuleTemplates = this.getConfiguredIngestModuleTemplates(dataSourceModuleTemplates, pipelineConfigs.getStageOneDataSourceIngestPipelineConfig());
-        List<IngestModuleTemplate> fileIngestModuleTemplates = this.getConfiguredIngestModuleTemplates(fileModuleTemplates, pipelineConfigs.getFileIngestPipelineConfig());
-        List<IngestModuleTemplate> secondStageDataSourceModuleTemplates = this.getConfiguredIngestModuleTemplates(dataSourceModuleTemplates, pipelineConfigs.getStageTwoDataSourceIngestPipelineConfig());
-
-        // Add any module templates that were not specified in the pipeline
-        // configurations to an appropriate pipeline - either the first stage
-        // data source ingest pipeline or the file ingest pipeline.
-        for (IngestModuleTemplate template : dataSourceModuleTemplates.values()) {
-            firstStageDataSourceModuleTemplates.add(template);
-        }
-        for (IngestModuleTemplate template : fileModuleTemplates.values()) {
-            fileIngestModuleTemplates.add(template);
-        }
-
-        // Contruct the data source ingest pipelines.
-        this.firstStageDataSourceIngestPipeline = new DataSourceIngestPipeline(this, firstStageDataSourceModuleTemplates);
-        this.secondStageDataSourceIngestPipeline = new DataSourceIngestPipeline(this, secondStageDataSourceModuleTemplates);
-        this.dataSourceIngestPipeline = firstStageDataSourceIngestPipeline;
-
-        // Construct the file ingest pipelines.
-        try {
-            int numberOfFileIngestThreads = IngestManager.getInstance().getNumberOfFileIngestThreads();
-            for (int i = 0; i < numberOfFileIngestThreads; ++i) {
-                this.fileIngestPipelines.put(new FileIngestPipeline(this, fileIngestModuleTemplates));
-            }
-        } catch (InterruptedException ex) {
-            /**
-             * The current thread was interrupted while blocked on a full queue.
-             * Blocking should never happen here, but reset the interrupted flag
-             * rather than just swallowing the exception.
-             */
-            Thread.currentThread().interrupt();
-        }
-    }
-
-    /**
-     * Use an ordered list of ingest module factory class names to create an
-     * ordered subset of a collection ingest module templates. The ingest module
-     * templates are removed from the input collection as they are added to the
-     * output collection.
-     *
-     * @param ingestModuleTemplates A mapping of ingest module factory class
-     * names to ingest module templates.
-     * @param pipelineConfig An ordered list of ingest module factory class
-     * names representing an ingest pipeline.
-     * @return
-     */
-    List<IngestModuleTemplate> getConfiguredIngestModuleTemplates(Map<String, IngestModuleTemplate> ingestModuleTemplates, List<String> pipelineConfig) {
-        List<IngestModuleTemplate> templates = new ArrayList<>();
-        for (String moduleClassName : pipelineConfig) {
-            if (ingestModuleTemplates.containsKey(moduleClassName)) {
-                templates.add(ingestModuleTemplates.remove(moduleClassName));
-            }
-        }
-        return templates;
-    }
-
     /**
      * Starts up the ingest pipelines and ingest progress bars.
      *
@@ -597,17 +582,102 @@ final class IngestJob {
         List<IngestModuleError> errors = startUpIngestPipelines();
         if (errors.isEmpty()) {
             if (this.hasFirstStageDataSourceIngestPipeline() || this.hasFileIngestPipeline()) {
-                // There is at least one first stage pipeline.
                 this.startFirstStage();
             } else if (this.hasSecondStageDataSourceIngestPipeline()) {
-                // There is no first stage pipeline, but there is a second stage
-                // ingest pipeline.
                 this.startSecondStage();
             }
         }
         return errors;
     }

+    /**
+     * Creates the file and data source ingest pipelines.
+     *
+     * @param ingestModuleTemplates Ingest module templates to use to populate
+     * the pipelines.
+     */
+    private void createIngestPipelines(List<IngestModuleTemplate> ingestModuleTemplates) {
+        /**
+         * Make mappings of ingest module factory class names to templates.
+         */
+        Map<String, IngestModuleTemplate> dataSourceModuleTemplates = new HashMap<>();
+        Map<String, IngestModuleTemplate> fileModuleTemplates = new HashMap<>();
+        for (IngestModuleTemplate template : ingestModuleTemplates) {
+            if (template.isDataSourceIngestModuleTemplate()) {
+                dataSourceModuleTemplates.put(template.getModuleFactory().getClass().getCanonicalName(), template);
+            }
+            if (template.isFileIngestModuleTemplate()) {
+                fileModuleTemplates.put(template.getModuleFactory().getClass().getCanonicalName(), template);
+            }
+        }
+
+        /**
+         * Use the mappings and the ingest pipelines configuration to create
+         * ordered lists of ingest module templates for each ingest pipeline.
+         */
+        IngestPipelinesConfiguration pipelineConfigs = IngestPipelinesConfiguration.getInstance();
+        List<IngestModuleTemplate> firstStageDataSourceModuleTemplates = IngestJob.getConfiguredIngestModuleTemplates(dataSourceModuleTemplates, pipelineConfigs.getStageOneDataSourceIngestPipelineConfig());
+        List<IngestModuleTemplate> fileIngestModuleTemplates = IngestJob.getConfiguredIngestModuleTemplates(fileModuleTemplates, pipelineConfigs.getFileIngestPipelineConfig());
+        List<IngestModuleTemplate> secondStageDataSourceModuleTemplates = IngestJob.getConfiguredIngestModuleTemplates(dataSourceModuleTemplates, pipelineConfigs.getStageTwoDataSourceIngestPipelineConfig());
+
+        /**
+         * Add any module templates that were not specified in the pipelines
+         * configuration to an appropriate pipeline - either the first stage
+         * data source ingest pipeline or the file ingest pipeline.
+         */
+        for (IngestModuleTemplate template : dataSourceModuleTemplates.values()) {
+            firstStageDataSourceModuleTemplates.add(template);
+        }
+        for (IngestModuleTemplate template : fileModuleTemplates.values()) {
+            fileIngestModuleTemplates.add(template);
+        }
+
+        /**
+         * Construct the data source ingest pipelines.
+         */
+        this.firstStageDataSourceIngestPipeline = new DataSourceIngestPipeline(this, firstStageDataSourceModuleTemplates);
+        this.secondStageDataSourceIngestPipeline = new DataSourceIngestPipeline(this, secondStageDataSourceModuleTemplates);
+
+        /**
+         * Construct the file ingest pipelines, one per file ingest thread.
+         */
+        try {
+            int numberOfFileIngestThreads = IngestManager.getInstance().getNumberOfFileIngestThreads();
+            for (int i = 0; i < numberOfFileIngestThreads; ++i) {
+                this.fileIngestPipelines.put(new FileIngestPipeline(this, fileIngestModuleTemplates));
+            }
+        } catch (InterruptedException ex) {
+            /**
+             * The current thread was interrupted while blocked on a full queue.
+             * Blocking should actually never happen here, but reset the
+             * interrupted flag rather than just swallowing the exception.
+             */
+            Thread.currentThread().interrupt();
+        }
+    }
+
+    /**
+     * Use an ordered list of ingest module factory class names to create an
+     * ordered output list of ingest module templates for an ingest pipeline.
+     * The ingest module templates are removed from the input collection as they
+     * are added to the output collection.
+     *
+     * @param ingestModuleTemplates A mapping of ingest module factory class
+     * names to ingest module templates.
+     * @param pipelineConfig An ordered list of ingest module factory class
+     * names representing an ingest pipeline.
+     * @return
+     */
+    private static List<IngestModuleTemplate> getConfiguredIngestModuleTemplates(Map<String, IngestModuleTemplate> ingestModuleTemplates, List<String> pipelineConfig) {
+        List<IngestModuleTemplate> templates = new ArrayList<>();
+        for (String moduleClassName : pipelineConfig) {
+            if (ingestModuleTemplates.containsKey(moduleClassName)) {
+                templates.add(ingestModuleTemplates.remove(moduleClassName));
+            }
+        }
+        return templates;
+    }

     /**
      * Starts the first stage of the job.
      */
@@ -615,7 +685,7 @@ final class IngestJob {
         this.stage = IngestJob.Stages.FIRST;

         /**
-         * Start one or both of the first stage progress bars.
+         * Start one or both of the first stage ingest progress bars.
         */
         if (this.hasFirstStageDataSourceIngestPipeline()) {
             this.startDataSourceIngestProgressBar();
@@ -624,6 +694,14 @@ final class IngestJob {
             this.startFileIngestProgressBar();
         }

+        /**
+         * Make the first stage data source level ingest pipeline the current
+         * data source level pipeline.
+         */
+        synchronized (this.dataSourceIngestPipelineLock) {
+            this.currentDataSourceIngestPipeline = this.firstStageDataSourceIngestPipeline;
+        }
+
        /**
         * Schedule the first stage tasks.
         */
@@ -639,11 +717,9 @@ final class IngestJob {
             * it is possible, if unlikely, that no file ingest tasks were
             * actually scheduled since there are files that get filtered out by
             * the tasks scheduler. In this special case, an ingest thread will
-            * never get to make the following check for this stage of the job.
+            * never get to check for completion of this stage of the job.
             */
-            if (IngestJob.taskScheduler.tasksForJobAreCompleted(this)) {
-                this.handleTasksCompleted();
-            }
+            this.checkForStageCompleted();
        }
    }

@@ -653,7 +729,9 @@ final class IngestJob {
     private void startSecondStage() {
         this.stage = IngestJob.Stages.SECOND;
         this.startDataSourceIngestProgressBar();
-        this.dataSourceIngestPipeline = this.secondStageDataSourceIngestPipeline;
+        synchronized (this.dataSourceIngestPipelineLock) {
+            this.currentDataSourceIngestPipeline = this.secondStageDataSourceIngestPipeline;
+        }
         IngestJob.taskScheduler.scheduleDataSourceIngestTask(this);
     }
@@ -669,7 +747,8 @@ final class IngestJob {
     }

     /**
-     * Checks to see if this job has a first stage data source ingest pipeline.
+     * Checks to see if this job has a first stage data source level ingest
+     * pipeline.
      *
      * @return True or false.
      */
@@ -678,7 +757,8 @@ final class IngestJob {
     }

     /**
-     * Checks to see if this job has a second stage data source ingest pipeline.
+     * Checks to see if this job has a second stage data source level ingest
+     * pipeline.
      *
      * @return True or false.
      */
@@ -687,7 +767,7 @@ final class IngestJob {
     }

     /**
-     * Checks to see if the job has a file ingest pipeline.
+     * Checks to see if the job has a file level ingest pipeline.
      *
      * @return True or false.
      */
@@ -696,8 +776,8 @@ final class IngestJob {
     }

     /**
-     * Starts up each of the file and data source ingest modules to collect
-     * possible errors.
+     * Starts up each of the file and data source level ingest modules to
+     * collect possible errors.
      *
      * @return A collection of ingest module startup errors, empty on success.
      */
@@ -705,7 +785,7 @@ final class IngestJob {
         List<IngestModuleError> errors = new ArrayList<>();

         // Start up the first stage data source ingest pipeline.
-        errors.addAll(this.dataSourceIngestPipeline.startUp());
+        errors.addAll(this.firstStageDataSourceIngestPipeline.startUp());

         // Start up the second stage data source ingest pipeline.
         errors.addAll(this.secondStageDataSourceIngestPipeline.startUp());
@@ -738,7 +818,7 @@ final class IngestJob {
     }

     /**
-     * Starts the data source ingest progress bar.
+     * Starts the data source level ingest progress bar.
      */
     private void startDataSourceIngestProgressBar() {
         synchronized (this.dataSourceIngestProgressLock) {
@@ -771,7 +851,7 @@ final class IngestJob {
     }

     /**
-     * Starts the file ingest progress bar.
+     * Starts the file level ingest progress bar.
      */
     private void startFileIngestProgressBar() {
         synchronized (this.fileIngestProgressLock) {
@@ -796,17 +876,21 @@ final class IngestJob {
     }

     /**
-     * Handles when all ingest tasks for this job are completed by finishing the
-     * current stage and possibly starting the next stage.
+     * Checks to see if the ingest tasks for the current stage are completed and
+     * does a stage transition if they are.
      */
-    private void handleTasksCompleted() {
-        switch (this.stage) {
-            case FIRST:
-                this.finishFirstStage();
-                break;
-            case SECOND:
-                this.finish();
-                break;
+    private void checkForStageCompleted() {
+        synchronized (this.stageCompletionCheckLock) {
+            if (IngestJob.taskScheduler.tasksForJobAreCompleted(this)) {
+                switch (this.stage) {
+                    case FIRST:
+                        this.finishFirstStage();
+                        break;
+                    case SECOND:
+                        this.finish();
+                        break;
+                }
+            }
         }
     }
@@ -859,6 +943,8 @@ final class IngestJob {
     * Shuts down the ingest pipelines and progress bars for this job.
     */
    private void finish() {
+        this.stage = IngestJob.Stages.FINALIZATION;
+
        // Finish the second stage data source ingest progress bar, if it hasn't
        // already been finished.
        synchronized (this.dataSourceIngestProgressLock) {
@@ -890,8 +976,8 @@ final class IngestJob {
    }

    /**
-    * Requests a temporary cancellation of data source ingest in order to stop
-    * the currently executing data source ingest module.
+    * Requests a temporary cancellation of data source level ingest in order to
+    * stop the currently executing data source ingest module.
     */
    private void cancelCurrentDataSourceIngestModule() {
        this.currentDataSourceIngestModuleCancelled = true;
@@ -902,8 +988,9 @@ final class IngestJob {
     *
     * @return An ingest job statistics object.
     */
-    private IngestJobSnapshot getIngestJobSnapshot() {
+    private IngestJobSnapshot getSnapshot() {
        return new IngestJobSnapshot();
    }

    /**
@@ -932,19 +1019,36 @@ final class IngestJob {
                this.estimatedFilesToProcess = IngestJob.this.estimatedFilesToProcess;
                this.snapShotTime = new Date().getTime();
            }

+            /**
+             * Get a snapshot of the tasks currently in progress for this job.
+             */
            this.tasksSnapshot = IngestJob.taskScheduler.getTasksSnapshotForJob(this.jobId);
        }

+        /**
+         * Gets the identifier of the ingest job that is the subject of this
+         * snapshot.
+         *
+         * @return The ingest job id.
+         */
        long getJobId() {
            return this.jobId;
        }

+        /**
+         * Gets the name of the data source associated with the ingest job that
+         * is the subject of this snapshot.
+         *
+         * @return A data source name string.
+         */
        String getDataSource() {
            return dataSource;
        }

        /**
-        * Gets files per second throughput since job started.
+        * Gets files per second throughput since the ingest job that is the
+        * subject of this snapshot started.
         *
         * @return Files processed per second (approximate).
         */
@@ -953,7 +1057,7 @@ final class IngestJob {
        }

        /**
-        * Gets the the ingest job was started.
+        * Gets the time the ingest job was started.
         *
         * @return The start time as number of milliseconds since January 1,
         * 1970, 00:00:00 GMT.
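
The handleTasksCompleted() to checkForStageCompleted() rename above is the core of this refactoring: instead of each caller deciding for itself whether it just finished the stage, every path that retires a task funnels through one lock-guarded check. A generic illustration of the race the lock closes (not Autopsy code; Task, scheduler, job, and advanceStage() are hypothetical):

    // Two ingest threads can retire their last tasks at the same moment.
    // Serializing the check with the transition means the second thread
    // re-checks against the post-transition stage instead of racing.
    void onTaskCompleted(Task task) {
        scheduler.notifyTaskCompleted(task); // bookkeeping always happens
        synchronized (stageCompletionCheckLock) {
            if (scheduler.tasksForJobAreCompleted(job)) {
                advanceStage(); // e.g., FIRST -> SECOND, or SECOND -> finish
            }
        }
    }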

View File: IngestProgressSnapshotPanel.java

@@ -30,6 +30,9 @@ import javax.swing.table.TableColumn;
 import org.apache.commons.lang3.time.DurationFormatUtils;
 import org.openide.util.NbBundle;

+/**
+ * A panel that displays ingest task progress snapshots.
+ */
 public class IngestProgressSnapshotPanel extends javax.swing.JPanel {

     private final JDialog parent;

View File: ingest pipelines configuration (XML)

@@ -16,7 +16,7 @@ Contains only the core ingest modules that ship with Autopsy -->
     <MODULE>org.sleuthkit.autopsy.thunderbirdparser.EmailParserModuleFactory</MODULE>
     <MODULE>org.sleuthkit.autopsy.modules.fileextmismatch.FileExtMismatchDetectorModuleFactory</MODULE>
     <MODULE>org.sleuthkit.autopsy.modules.interestingitems.InterestingItemsIngestModuleFactory</MODULE>
     <MODULE>org.sleuthkit.autopsy.modules.photoreccarver.PhotoRecCarverIngestModuleFactory</MODULE>
 </PIPELINE>
 <PIPELINE type="ImageAnalysisStageTwo">

View File: PhotoRecCarverFileIngestModule.java

@@ -216,7 +216,7 @@ final class PhotoRecCarverFileIngestModule implements FileIngestModule {
             PhotoRecCarverOutputParser parser = new PhotoRecCarverOutputParser(outputDirPath);
             List<LayoutFile> theList = parser.parse(newAuditFile, id, file);
             if (theList != null) { // if there were any results from carving, add the unallocated carving event to the reports list.
-                context.scheduleFiles(new ArrayList<>(theList));
+                context.addFilesToJob(new ArrayList<>(theList));
             }
         }
         catch (IOException ex) {

View File: SevenZipIngestModule.java

@@ -181,7 +181,7 @@ public final class SevenZipIngestModule implements FileIngestModule {
             //currently sending a single event for all new files
             services.fireModuleContentEvent(new ModuleContentEvent(abstractFile));

-            context.scheduleFiles(unpackedFiles);
+            context.addFilesToJob(unpackedFiles);
         }

         return ProcessResult.OK;

View File: ReportBranding.java

@@ -2,7 +2,7 @@
 *
 * Autopsy Forensic Browser
 *
- * Copyright 2013 Basis Technology Corp.
+ * Copyright 2013-2014 Basis Technology Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -48,7 +48,14 @@ public final class ReportBranding implements ReportBrandingProviderI {
     private String reportsBrandingDir; //dir with extracted reports branding resources
     private static final String MODULE_NAME = ReportBranding.class.getSimpleName();
     private static final Logger logger = Logger.getLogger(ReportBranding.class.getName());
-    private static String generatorLogoPath;
+
+    // this is static so that it can be set by another object
+    // before the report is actually made. Entire class should
+    // probably become singleton. Is set to null until setPath
+    // is called to specify something other than default.
+    private static String generatorLogoPath = null;
+
+    private String defaultGeneratorLogoPath;

     public ReportBranding() {
@@ -64,7 +71,7 @@ public final class ReportBranding implements ReportBrandingProviderI {
                 //TODO use defaults
             }
         }
-        extractGeneratorLogo();
+        extractDefaultGeneratorLogo();
         getAgencyLogoPath();
         getReportTitle();
     }
@@ -74,29 +81,40 @@ public final class ReportBranding implements ReportBrandingProviderI {
         return reportsBrandingDir;
     }

-    private void extractGeneratorLogo() {
+    /**
+     * extract default logo from JAR file to local file.
+     */
+    private void extractDefaultGeneratorLogo() {
         try {
             PlatformUtil.extractResourceToUserConfigDir(getClass(), DEFAULT_GENERATOR_LOGO, true);
         } catch (IOException ex) {
             logger.log(Level.SEVERE, "Error extracting report branding resource for generator logo ", ex); //NON-NLS
         }
-        generatorLogoPath = PlatformUtil.getUserConfigDirectory() + File.separator + DEFAULT_GENERATOR_LOGO;
+        defaultGeneratorLogoPath = PlatformUtil.getUserConfigDirectory() + File.separator + DEFAULT_GENERATOR_LOGO;
     }

     @Override
     public String getGeneratorLogoPath() {
+        // if no one called to change the path, use default
+        if (generatorLogoPath == null)
+            generatorLogoPath = defaultGeneratorLogoPath;
+
         return generatorLogoPath;
     }

     @Override
     public void setGeneratorLogoPath(String path) {
+        generatorLogoPath = path;
     }

     @Override
     public String getAgencyLogoPath() {
         String curPath = null;

+        /* The agency logo code uses these properties to persist changes
+         * in the logo (within the same process).
+         * This is different from the generator logo that uses a static variable.
+         */
         curPath = ModuleSettings.getConfigSetting(MODULE_NAME, AGENCY_LOGO_PATH_PROP);
         //if has been set, validate it's correct, if not set, return null
         if (curPath != null && new File(curPath).canRead() == false) {
@@ -110,6 +128,8 @@ public final class ReportBranding implements ReportBrandingProviderI {
     @Override
     public void setAgencyLogoPath(String path) {
+        // Use properties to persist the logo to use.
+        // Should use static variable instead
         ModuleSettings.setConfigSetting(MODULE_NAME, AGENCY_LOGO_PATH_PROP, path);
     }

View File: KeywordSearchIngestModule.java

@@ -247,8 +247,7 @@ public final class KeywordSearchIngestModule implements FileIngestModule {
             return;
         }

-        if (context.isJobCancelled()) {
-            logger.log(Level.INFO, "Ingest job cancelled"); //NON-NLS
+        if (context.fileIngestIsCancelled()) {
             stop();
             return;
         }
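
This commit splits the old IngestJobContext.isJobCancelled() into level-specific checks. A file ingest module such as this one polls fileIngestIsCancelled(); a sketch of the idiom, assuming a module with an IngestJobContext field named context:

    // Sketch only: a file module bails out early once the job is cancelled.
    @Override
    public ProcessResult process(AbstractFile file) {
        if (context.fileIngestIsCancelled()) {
            return ProcessResult.OK; // shutting down; do no further work
        }
        // ... analyze or index the file ...
        return ProcessResult.OK;
    }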

View File: Chrome.java

@@ -128,7 +128,7 @@ class Chrome extends Extract {
                 continue;
             }
             File dbFile = new File(temps);
-            if (context.isJobCancelled()) {
+            if (context.dataSourceIngestIsCancelled()) {
                 dbFile.delete();
                 break;
             }
@@ -204,7 +204,7 @@ class Chrome extends Extract {
             logger.log(Level.INFO, "{0}- Now getting Bookmarks from {1}", new Object[]{moduleName, temps}); //NON-NLS
             File dbFile = new File(temps);
-            if (context.isJobCancelled()) {
+            if (context.dataSourceIngestIsCancelled()) {
                 dbFile.delete();
                 break;
             }
@@ -341,7 +341,7 @@ class Chrome extends Extract {
                 continue;
             }
             File dbFile = new File(temps);
-            if (context.isJobCancelled()) {
+            if (context.dataSourceIngestIsCancelled()) {
                 dbFile.delete();
                 break;
             }
@@ -416,7 +416,7 @@ class Chrome extends Extract {
                 continue;
             }
             File dbFile = new File(temps);
-            if (context.isJobCancelled()) {
+            if (context.dataSourceIngestIsCancelled()) {
                 dbFile.delete();
                 break;
             }
@@ -504,7 +504,7 @@ class Chrome extends Extract {
                 continue;
             }
             File dbFile = new File(temps);
-            if (context.isJobCancelled()) {
+            if (context.dataSourceIngestIsCancelled()) {
                 dbFile.delete();
                 break;
             }
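
The counterpart for data source level modules, used in Chrome above and in ExtractIE and ExtractRegistry below, is dataSourceIngestIsCancelled(); long extraction loops poll it and clean up temporary files before bailing out. A sketch of the loop shape (the candidates collection is hypothetical):

    for (AbstractFile candidate : candidates) {
        if (context.dataSourceIngestIsCancelled()) {
            // delete any temp file first, as the Chrome loops above do
            break;
        }
        // ... extract and parse the candidate database ...
    }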

View File: ExtractIE.java

@@ -52,6 +52,7 @@ import org.sleuthkit.datamodel.BlackboardAttribute;
 import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
 import org.sleuthkit.datamodel.Content;
 import org.sleuthkit.autopsy.coreutils.PlatformUtil;
+import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProcessTerminator;
 import org.sleuthkit.autopsy.ingest.IngestJobContext;
 import org.sleuthkit.datamodel.*;
@@ -111,7 +112,7 @@ class ExtractIE extends Extract {
                 continue;
             }
-            if (context.isJobCancelled()) {
+            if (context.dataSourceIngestIsCancelled()) {
                 break;
             }
@@ -201,7 +202,7 @@ class ExtractIE extends Extract {
         dataFound = true;
         for (AbstractFile cookiesFile : cookiesFiles) {
-            if (context.isJobCancelled()) {
+            if (context.dataSourceIngestIsCancelled()) {
                 break;
             }
             if (cookiesFile.getSize() == 0) {
@@ -309,7 +310,7 @@ class ExtractIE extends Extract {
             //indexFileName = "index" + Long.toString(bbart.getArtifactID()) + ".dat";
             temps = RAImageIngestModule.getRATempPath(currentCase, "IE") + File.separator + indexFileName; //NON-NLS
             File datFile = new File(temps);
-            if (context.isJobCancelled()) {
+            if (context.dataSourceIngestIsCancelled()) {
                 break;
             }
             try {
@@ -324,6 +325,9 @@ class ExtractIE extends Extract {
             String filename = "pasco2Result." + indexFile.getId() + ".txt"; //NON-NLS
             boolean bPascProcSuccess = executePasco(temps, filename);
+            if (context.dataSourceIngestIsCancelled()) {
+                return;
+            }

             //At this point pasco2 proccessed the index files.
             //Now fetch the results, parse them and the delete the files.
@@ -354,34 +358,26 @@ class ExtractIE extends Extract {
      */
     private boolean executePasco(String indexFilePath, String outputFileName) {
         boolean success = true;
-        Writer writer = null;
-        ExecUtil execPasco = new ExecUtil();
         try {
             final String outputFileFullPath = moduleTempResultsDir + File.separator + outputFileName;
+            final String errFileFullPath = moduleTempResultsDir + File.separator + outputFileName + ".err";
             logger.log(Level.INFO, "Writing pasco results to: {0}", outputFileFullPath); //NON-NLS
-            writer = new FileWriter(outputFileFullPath);
-            execPasco.execute(writer, JAVA_PATH,
-                    "-cp", PASCO_LIB_PATH, //NON-NLS
-                    "isi.pasco2.Main", "-T", "history", indexFilePath ); //NON-NLS
+            List<String> commandLine = new ArrayList<>();
+            commandLine.add(JAVA_PATH);
+            commandLine.add("-cp"); //NON-NLS
+            commandLine.add(PASCO_LIB_PATH);
+            commandLine.add("isi.pasco2.Main"); //NON-NLS
+            commandLine.add("-T"); //NON-NLS
+            commandLine.add("history"); //NON-NLS
+            commandLine.add(indexFilePath);
+            ProcessBuilder processBuilder = new ProcessBuilder(commandLine);
+            processBuilder.redirectOutput(new File(outputFileFullPath));
+            processBuilder.redirectError(new File(errFileFullPath));
+            ExecUtil.execute(processBuilder, new DataSourceIngestModuleProcessTerminator(context));
             // @@@ Investigate use of history versus cache as type.
         } catch (IOException ex) {
             success = false;
             logger.log(Level.SEVERE, "Unable to execute Pasco to process Internet Explorer web history.", ex); //NON-NLS
-        } catch (InterruptedException ex) {
-            success = false;
-            logger.log(Level.SEVERE, "Pasco has been interrupted, failed to extract some web history from Internet Explorer.", ex); //NON-NLS
-        }
-        finally {
-            if (writer != null) {
-                try {
-                    writer.flush();
-                    writer.close();
-                } catch (IOException ex) {
-                    logger.log(Level.WARNING, "Error closing writer stream after for Pasco result", ex); //NON-NLS
-                }
-            }
-            execPasco.stop();
         }
         return success;
     }
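
For reference, the rewritten executePasco builds the equivalent of running "java -cp <PASCO_LIB_PATH> isi.pasco2.Main -T history <indexFilePath>" with stdout and stderr redirected to the result file and a matching .err file. A condensed sketch of the same launch pattern, assuming only the ExecUtil.execute(ProcessBuilder, ProcessTerminator) overload introduced by this commit; the tool and output paths are placeholders:

    import java.io.File;
    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;
    import org.sleuthkit.autopsy.coreutils.ExecUtil;
    import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProcessTerminator;
    import org.sleuthkit.autopsy.ingest.IngestJobContext;

    class ExternalToolSketch {
        static void runTool(IngestJobContext context, String toolPath, String outPath) throws IOException {
            List<String> commandLine = new ArrayList<>();
            commandLine.add(toolPath);
            ProcessBuilder processBuilder = new ProcessBuilder(commandLine);
            // Redirect the child process streams to files instead of pumping
            // them through writer threads, as the old instance-based ExecUtil did.
            processBuilder.redirectOutput(new File(outPath));
            processBuilder.redirectError(new File(outPath + ".err"));
            // The terminator kills the child process if this ingest task is cancelled.
            ExecUtil.execute(processBuilder, new DataSourceIngestModuleProcessTerminator(context));
        }
    }

This removes the per-call Writer bookkeeping and the InterruptedException handling that the old API forced on every caller.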

View File

@@ -37,6 +37,7 @@ import org.sleuthkit.autopsy.coreutils.ExecUtil;
 import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.autopsy.coreutils.PlatformUtil;
 import org.sleuthkit.autopsy.datamodel.ContentUtils;
+import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProcessTerminator;
 import org.sleuthkit.autopsy.ingest.IngestJobContext;
 import org.sleuthkit.autopsy.recentactivity.UsbDeviceIdMapper.USBInfo;
 import org.sleuthkit.datamodel.*;
@@ -62,14 +63,10 @@ class ExtractRegistry extends Extract {
     private String RR_FULL_PATH;
     private boolean rrFound = false;    // true if we found the Autopsy-specific version of regripper
     private boolean rrFullFound = false; // true if we found the full version of regripper
-    final private static String MODULE_VERSION = "1.0";
     private Content dataSource;
     private IngestJobContext context;
     final private static UsbDeviceIdMapper usbMapper = new UsbDeviceIdMapper();

-    //hide public constructor to prevent from instantiation by ingest module loader
     ExtractRegistry() {
         moduleName = NbBundle.getMessage(ExtractIE.class, "ExtractRegistry.moduleName.text");
         final File rrRoot = InstalledFileLocator.getDefault().locate("rr", ExtractRegistry.class.getPackage().getName(), false); //NON-NLS
@@ -169,7 +166,7 @@ class ExtractRegistry extends Extract {
                 continue;
             }
-            if (context.isJobCancelled()) {
+            if (context.dataSourceIngestIsCancelled()) {
                 break;
             }
@@ -182,10 +179,9 @@ class ExtractRegistry extends Extract {
                 logger.log(Level.SEVERE, null, ex);
             }
-            logger.log(Level.INFO, moduleName + "- Now getting registry information from " + regFileNameLocal); //NON-NLS
-            RegOutputFiles regOutputFiles = executeRegRip(regFileNameLocal, outputPathBase);
-
-            if (context.isJobCancelled()) {
+            logger.log(Level.INFO, "{0}- Now getting registry information from {1}", new Object[]{moduleName, regFileNameLocal}); //NON-NLS
+            RegOutputFiles regOutputFiles = ripRegistryFile(regFileNameLocal, outputPathBase);
+            if (context.dataSourceIngestIsCancelled()) {
                 break;
             }
@@ -268,9 +264,9 @@ class ExtractRegistry extends Extract {
      * @param regFilePath     Path to local copy of registry
      * @param outFilePathBase Path to location to save output file to. Base mtimeItem that will be extended on
      */
-    private RegOutputFiles executeRegRip(String regFilePath, String outFilePathBase) {
+    private RegOutputFiles ripRegistryFile(String regFilePath, String outFilePathBase) {
         String autopsyType = "";    // Type argument for rr for autopsy-specific modules
-        String fullType = "";   // Type argument for rr for full set of modules
+        String fullType;   // Type argument for rr for full set of modules
         RegOutputFiles regOutputFiles = new RegOutputFiles();
@@ -298,78 +294,44 @@ class ExtractRegistry extends Extract {
         // run the autopsy-specific set of modules
         if (!autopsyType.isEmpty() && rrFound) {
-            // TODO - add error messages
-            Writer writer = null;
-            ExecUtil execRR = null;
-            try {
-                regOutputFiles.autopsyPlugins = outFilePathBase + "-autopsy.txt"; //NON-NLS
-                logger.log(Level.INFO, "Writing RegRipper results to: " + regOutputFiles.autopsyPlugins); //NON-NLS
-                writer = new FileWriter(regOutputFiles.autopsyPlugins);
-                execRR = new ExecUtil();
-                execRR.execute(writer, RR_PATH,
-                        "-r", regFilePath, "-f", autopsyType); //NON-NLS
-            } catch (IOException ex) {
-                logger.log(Level.SEVERE, "Unable to RegRipper and process parse some registry files.", ex); //NON-NLS
-                this.addErrorMessage(
-                        NbBundle.getMessage(this.getClass(), "ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile",
-                                this.getName()));
-            } catch (InterruptedException ex) {
-                logger.log(Level.SEVERE, "RegRipper has been interrupted, failed to parse registry.", ex); //NON-NLS
-                this.addErrorMessage(
-                        NbBundle.getMessage(this.getClass(), "ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile2",
-                                this.getName()));
-            } finally {
-                if (writer != null) {
-                    try {
-                        writer.close();
-                    } catch (IOException ex) {
-                        logger.log(Level.SEVERE, "Error closing output writer after running RegRipper", ex); //NON-NLS
-                    }
-                }
-                if (execRR != null) {
-                    execRR.stop();
-                }
-            }
+            regOutputFiles.autopsyPlugins = outFilePathBase + "-autopsy.txt"; //NON-NLS
+            String errFilePath = outFilePathBase + "-autopsy.err.txt"; //NON-NLS
+            logger.log(Level.INFO, "Writing RegRipper results to: {0}", regOutputFiles.autopsyPlugins); //NON-NLS
+            executeRegRipper(regFilePath, autopsyType, regOutputFiles.autopsyPlugins, errFilePath);
+        }
+        if (context.dataSourceIngestIsCancelled()) {
+            return regOutputFiles;
         }

         // run the full set of rr modules
         if (!fullType.isEmpty() && rrFullFound) {
-            Writer writer = null;
-            ExecUtil execRR = null;
-            try {
-                regOutputFiles.fullPlugins = outFilePathBase + "-full.txt"; //NON-NLS
-                logger.log(Level.INFO, "Writing Full RegRipper results to: " + regOutputFiles.fullPlugins); //NON-NLS
-                writer = new FileWriter(regOutputFiles.fullPlugins);
-                execRR = new ExecUtil();
-                execRR.execute(writer, RR_FULL_PATH,
-                        "-r", regFilePath, "-f", fullType); //NON-NLS
-            } catch (IOException ex) {
-                logger.log(Level.SEVERE, "Unable to run full RegRipper and process parse some registry files.", ex); //NON-NLS
-                this.addErrorMessage(
-                        NbBundle.getMessage(this.getClass(), "ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile3",
-                                this.getName()));
-            } catch (InterruptedException ex) {
-                logger.log(Level.SEVERE, "RegRipper full has been interrupted, failed to parse registry.", ex); //NON-NLS
-                this.addErrorMessage(
-                        NbBundle.getMessage(this.getClass(), "ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile4",
-                                this.getName()));
-            } finally {
-                if (writer != null) {
-                    try {
-                        writer.close();
-                    } catch (IOException ex) {
-                        logger.log(Level.SEVERE, "Error closing output writer after running RegRipper full", ex); //NON-NLS
-                    }
-                }
-                if (execRR != null) {
-                    execRR.stop();
-                }
-            }
+            regOutputFiles.fullPlugins = outFilePathBase + "-full.txt"; //NON-NLS
+            String errFilePath = outFilePathBase + "-full.err.txt"; //NON-NLS
+            logger.log(Level.INFO, "Writing Full RegRipper results to: {0}", regOutputFiles.fullPlugins); //NON-NLS
+            executeRegRipper(regFilePath, fullType, regOutputFiles.fullPlugins, errFilePath);
         }
         return regOutputFiles;
     }
+
+    private void executeRegRipper(String hiveFilePath, String hiveFileType, String outputFile, String errFile) {
+        try {
+            logger.log(Level.INFO, "Writing RegRipper results to: {0}", outputFile); //NON-NLS
+            List<String> commandLine = new ArrayList<>();
+            commandLine.add(RR_PATH);
+            commandLine.add("-r"); //NON-NLS
+            commandLine.add(hiveFilePath);
+            commandLine.add("-f"); //NON-NLS
+            commandLine.add(hiveFileType);
+            ProcessBuilder processBuilder = new ProcessBuilder(commandLine);
+            processBuilder.redirectOutput(new File(outputFile));
+            processBuilder.redirectError(new File(errFile));
+            ExecUtil.execute(processBuilder, new DataSourceIngestModuleProcessTerminator(context));
+        } catch (IOException ex) {
+            logger.log(Level.SEVERE, "Unable to run RegRipper", ex); //NON-NLS
+            this.addErrorMessage(NbBundle.getMessage(this.getClass(), "ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile", this.getName()));
+        }
+    }

     // @@@ VERIFY that we are doing the right thing when we parse multiple NTUSER.DAT
     /**
      *
@@ -558,7 +520,7 @@ class ExtractRegistry extends Extract {
                         }
                         break;
                     default:
-                        logger.log(Level.WARNING, "Unercognized node name: " + dataType);
+                        logger.log(Level.WARNING, "Unrecognized node name: {0}", dataType);
                         break;
                 }
             }
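
The DataSourceIngestModuleProcessTerminator passed to ExecUtil.execute() above is what lets the framework stop RegRipper and pasco mid-run. Its implementation is not part of this diff; conceptually it is a predicate that ExecUtil polls while waiting on the child process, roughly along these lines. This is a sketch only: the ProcessTerminator interface and the shouldTerminateProcess() method name are assumptions, not taken from the diff:

    import org.sleuthkit.autopsy.ingest.IngestJobContext;

    // Sketch of a terminator that defers to the ingest context; the real
    // class ships with Autopsy, and the interface and method names here
    // are assumed for illustration.
    class DataSourceTerminatorSketch /* implements ExecUtil.ProcessTerminator */ {
        private final IngestJobContext context;

        DataSourceTerminatorSketch(IngestJobContext context) {
            this.context = context;
        }

        public boolean shouldTerminateProcess() {
            // Polled while waiting on the child process; returning true
            // causes the external process to be killed.
            return context.dataSourceIngestIsCancelled();
        }
    }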

View File

@@ -116,7 +116,7 @@ class Firefox extends Extract {
                 continue;
             }
             File dbFile = new File(temps);
-            if (context.isJobCancelled()) {
+            if (context.dataSourceIngestIsCancelled()) {
                 dbFile.delete();
                 break;
             }
@@ -197,7 +197,7 @@ class Firefox extends Extract {
                 continue;
             }
             File dbFile = new File(temps);
-            if (context.isJobCancelled()) {
+            if (context.dataSourceIngestIsCancelled()) {
                 dbFile.delete();
                 break;
             }
@@ -277,7 +277,7 @@ class Firefox extends Extract {
                 continue;
             }
             File dbFile = new File(temps);
-            if (context.isJobCancelled()) {
+            if (context.dataSourceIngestIsCancelled()) {
                 dbFile.delete();
                 break;
             }
@@ -385,16 +385,16 @@ class Firefox extends Extract {
                 continue;
             }
             File dbFile = new File(temps);
-            if (context.isJobCancelled()) {
+            if (context.dataSourceIngestIsCancelled()) {
                 dbFile.delete();
                 break;
             }
             List<HashMap<String, Object>> tempList = this.dbConnect(temps, downloadQuery);
-            logger.log(Level.INFO, moduleName + "- Now getting downloads from " + temps + " with " + tempList.size() + "artifacts identified."); //NON-NLS
+            logger.log(Level.INFO, "{0}- Now getting downloads from {1} with {2} artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); //NON-NLS
             for (HashMap<String, Object> result : tempList) {
-                Collection<BlackboardAttribute> bbattributes = new ArrayList<BlackboardAttribute>();
+                Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
                 bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(),
                         NbBundle.getMessage(this.getClass(),
@@ -494,7 +494,7 @@ class Firefox extends Extract {
                 continue;
             }
             File dbFile = new File(temps);
-            if (context.isJobCancelled()) {
+            if (context.dataSourceIngestIsCancelled()) {
                 dbFile.delete();
                 break;
             }
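
Besides the cancellation rename, the Firefox hunks switch string-concatenated log calls to java.util.logging's parameterized form, which defers message formatting until the record is actually published, and adopt the Java 7 diamond operator. A small self-contained illustration of the logging change, using plain java.util.logging (Autopsy's own Logger wrapper extends it):

    import java.util.logging.Level;
    import java.util.logging.Logger;

    class LoggingSketch {
        private static final Logger logger = Logger.getLogger(LoggingSketch.class.getName());

        void report(String moduleName, String temps, int count) {
            // Concatenation builds the message string even when INFO is
            // filtered out:
            //   logger.log(Level.INFO, moduleName + "- downloads from " + temps);
            // The parameterized form formats lazily:
            logger.log(Level.INFO, "{0}- Now getting downloads from {1} with {2} artifacts identified.",
                    new Object[]{moduleName, temps, count});
        }
    }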

View File

@@ -93,7 +93,7 @@ public final class RAImageIngestModule implements DataSourceIngestModule {
         for (int i = 0; i < extracters.size(); i++) {
             Extract extracter = extracters.get(i);
-            if (context.isJobCancelled()) {
+            if (context.dataSourceIngestIsCancelled()) {
                 logger.log(Level.INFO, "Recent Activity has been canceled, quitting before {0}", extracter.getName()); //NON-NLS
                 break;
             }
@@ -161,7 +161,7 @@ public final class RAImageIngestModule implements DataSourceIngestModule {
                 historyMsg.toString());
         services.postMessage(inboxMsg);
-        if (context.isJobCancelled()) {
+        if (context.dataSourceIngestIsCancelled()) {
             return ProcessResult.OK;
         }

View File

@@ -79,7 +79,7 @@ class RecentDocumentsByLnk extends Extract {
         dataFound = true;
         for (AbstractFile recentFile : recentFiles) {
-            if (context.isJobCancelled()) {
+            if (context.dataSourceIngestIsCancelled()) {
                 break;
             }

View File

@@ -278,7 +278,7 @@ class SearchEngineURLQueryAnalyzer extends Extract {
             logger.log(Level.INFO, "Processing {0} blackboard artifacts.", listArtifacts.size()); //NON-NLS
             for (BlackboardArtifact artifact : listArtifacts) {
-                if (context.isJobCancelled()) {
+                if (context.dataSourceIngestIsCancelled()) {
                     break;       //User cancled the process.
                 }
@@ -346,7 +346,7 @@ class SearchEngineURLQueryAnalyzer extends Extract {
         } catch (TskCoreException e) {
             logger.log(Level.SEVERE, "Encountered error retrieving artifacts for search engine queries", e); //NON-NLS
         } finally {
-            if (context.isJobCancelled()) {
+            if (context.dataSourceIngestIsCancelled()) {
                 logger.info("Operation terminated by user."); //NON-NLS
             }
             IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(

View File

@@ -223,7 +223,7 @@ class ScalpelCarverIngestModule implements FileIngestModule {
         }

         // reschedule carved files
-        context.scheduleFiles(new ArrayList<AbstractFile>(carvedFiles));
+        context.addFilesToJob(new ArrayList<AbstractFile>(carvedFiles));
         return ProcessResult.OK;
     }
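
scheduleFiles() is renamed addFilesToJob() in the IngestJobContext API; this Scalpel hunk uses it to feed newly carved files back into the current ingest job, and the Thunderbird hunk below does the same for derived files. A minimal sketch, where only the context call is taken from the diff and the surrounding class is hypothetical:

    import java.util.ArrayList;
    import java.util.List;
    import org.sleuthkit.autopsy.ingest.IngestJobContext;
    import org.sleuthkit.datamodel.AbstractFile;

    class CarvedFileSketch {
        void submitCarvedFiles(IngestJobContext context, List<AbstractFile> carvedFiles) {
            // Newly carved files are added to the current job so the
            // other ingest modules get a chance to analyze them too.
            context.addFilesToJob(new ArrayList<>(carvedFiles));
        }
    }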

View File

@@ -289,7 +289,7 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
                 services.fireModuleContentEvent(new ModuleContentEvent(derived));
             }
         }
-        context.scheduleFiles(derivedFiles);
+        context.addFilesToJob(derivedFiles);
         services.fireModuleDataEvent(new ModuleDataEvent(EmailParserModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG));
     }