Merge remote-tracking branch 'upstream/develop' into develop

Karl Mortensen 2014-11-07 08:28:42 -05:00
commit 5f13af169f
20 changed files with 446 additions and 353 deletions

View File

@ -149,7 +149,10 @@ public final class ExecUtil {
logger.log(Level.WARNING, "Error occurred when attempting to kill process: {0}", ex.getMessage()); // NON-NLS
}
}
/**
* EVERYTHING FOLLOWING THIS LINE IS DEPRECATED AND SLATED FOR REMOVAL
*/
private static final Logger logger = Logger.getLogger(ExecUtil.class.getName());
private Process proc = null;
private ExecUtil.StreamToStringRedirect errorStringRedirect = null;
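These deprecated instance members are replaced throughout this commit by the static ExecUtil.execute(ProcessBuilder, ProcessTerminator) overload. A minimal sketch of the new pattern, assuming an ingest module with an IngestJobContext in scope; the helper class, executable name, and paths are placeholders, not part of this commit:

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.sleuthkit.autopsy.coreutils.ExecUtil;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProcessTerminator;
import org.sleuthkit.autopsy.ingest.IngestJobContext;

class ProcessRunnerSketch {
    // Runs an external tool on behalf of a data source ingest module.
    static void runTool(IngestJobContext context, String inputPath, String resultsPath) throws IOException {
        List<String> commandLine = new ArrayList<>();
        commandLine.add("some.exe"); // placeholder executable
        commandLine.add(inputPath);
        commandLine.add(resultsPath);
        ProcessBuilder processBuilder = new ProcessBuilder(commandLine);
        // Optionally capture the child's stdout/stderr in files, replacing the
        // stream-redirect threads the deprecated API managed internally.
        processBuilder.redirectOutput(new File(resultsPath + ".out"));
        processBuilder.redirectError(new File(resultsPath + ".err"));
        // The terminator watches the ingest job context and kills the process
        // if the job is cancelled, so modules no longer call ExecUtil.stop().
        ExecUtil.execute(processBuilder, new DataSourceIngestModuleProcessTerminator(context));
    }
}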

View File

@ -52,6 +52,7 @@ import org.sleuthkit.autopsy.externalresults.ExternalResults;
import org.sleuthkit.autopsy.externalresults.ExternalResultsImporter;
import org.sleuthkit.autopsy.externalresults.ExternalResultsXMLParser;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProcessTerminator;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
@ -73,15 +74,15 @@ public class SampleExecutableDataSourceIngestModule implements DataSourceIngestM
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
private static final String moduleName = SampleExecutableIngestModuleFactory.getModuleName();
private final String fileInCaseDatabase = "/WINDOWS/system32/ntmsapi.dll"; // Probably
private long jobId;
private final String fileInCaseDatabase = "/WINDOWS/system32/ntmsapi.dll"; // Probably
private IngestJobContext context;
private String outputDirPath;
private String derivedFileInCaseDatabase;
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
jobId = context.getJobId();
if (refCounter.incrementAndGet(jobId) == 1) {
this.context = context;
if (refCounter.incrementAndGet(context.getJobId()) == 1) {
// Create an output directory for this job.
outputDirPath = Case.getCurrentCase().getModulesOutputDirAbsPath() + File.separator + moduleName; //NON-NLS
File outputDir = new File(outputDirPath);
@ -93,7 +94,7 @@ public class SampleExecutableDataSourceIngestModule implements DataSourceIngestM
@Override
public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
if (refCounter.get(jobId) == 1) {
if (refCounter.get(context.getJobId()) == 1) {
try {
// There will be two tasks: data source analysis and import of
// the results of the analysis.
@ -108,14 +109,18 @@ public class SampleExecutableDataSourceIngestModule implements DataSourceIngestM
// derived files, and reports generated by the analysis. In this
// sample ingest module, the generation of the analysis results is
// simulated.
String resultsFilePath = outputDirPath + File.separator + String.format("job_%d_results.xml", jobId);
String resultsFilePath = outputDirPath + File.separator + String.format("job_%d_results.xml", context.getJobId());
boolean haveRealExecutable = false;
if (haveRealExecutable) {
if (dataSource instanceof Image) {
Image image = (Image)dataSource;
String dataSourcePath = image.getPaths()[0];
ExecUtil executor = new ExecUtil();
executor.execute("some.exe", dataSourcePath, resultsFilePath);
List<String> commandLine = new ArrayList<>();
commandLine.add("some.exe");
commandLine.add(dataSourcePath);
commandLine.add(resultsFilePath);
ProcessBuilder processBuilder = new ProcessBuilder(commandLine);
ExecUtil.execute(processBuilder, new DataSourceIngestModuleProcessTerminator(context));
}
// not a disk image
else {
@ -136,7 +141,7 @@ public class SampleExecutableDataSourceIngestModule implements DataSourceIngestM
IngestServices.getInstance().postMessage(IngestMessage.createErrorMessage(moduleName, "External Results Import Error", errorInfo.getMessage()));
}
progressBar.progress(2);
} catch (InterruptedException | ParserConfigurationException | TransformerException | IOException ex) {
} catch (ParserConfigurationException | TransformerException | IOException ex) {
Logger logger = IngestServices.getInstance().getLogger(moduleName);
logger.log(Level.SEVERE, "Failed to simulate analysis and results import", ex); //NON-NLS
return ProcessResult.ERROR;
@ -155,7 +160,7 @@ public class SampleExecutableDataSourceIngestModule implements DataSourceIngestM
List<String> filePaths = new ArrayList<>();
String fileContents = "This is a simulated derived file.";
for (int i = 0; i < 2; ++i) {
String fileName = String.format("job_%d_derived_file_%d.txt", jobId, i);
String fileName = String.format("job_%d_derived_file_%d.txt", context.getJobId(), i);
filePaths.add(generateFile(fileName, fileContents.getBytes()));
if (i == 0) {
this.derivedFileInCaseDatabase = this.fileInCaseDatabase + "/" + fileName;
@ -168,7 +173,7 @@ public class SampleExecutableDataSourceIngestModule implements DataSourceIngestM
List<String> filePaths = new ArrayList<>();
String fileContents = "This is a simulated report.";
for (int i = 0; i < 2; ++i) {
String fileName = String.format("job_%d_report_%d.txt", jobId, i);
String fileName = String.format("job_%d_report_%d.txt", context.getJobId(), i);
filePaths.add(generateFile(fileName, fileContents.getBytes()));
}
return filePaths;

View File

@ -71,14 +71,14 @@ public class DataSourceIngestModuleProgress {
}
/**
* Updates the progress bar with the number of work units performed, if in
* the determinate mode.
* Updates the progress bar with a new task name and the number of work
* units performed, if in the determinate mode.
*
* @param message Message to display in sub-title
* @param currentTask The task name.
* @param workUnits Number of work units performed so far by the module.
*/
public void progress(String message, int workUnits) {
this.job.advanceDataSourceIngestProgressBar(message, workUnits);
public void progress(String currentTask, int workUnits) {
this.job.advanceDataSourceIngestProgressBar(currentTask, workUnits);
}
}
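For module authors, a sketch of the renamed parameter in use inside a DataSourceIngestModule.process() implementation; the task names and work-unit count are illustrative, and the switchToDeterminate call is assumed from the existing progress API:

// Two work units: analysis, then import of results.
progressBar.switchToDeterminate(2);
progressBar.progress("Analyzing data source", 1); // task name appears as the sub-title
// ... perform the analysis ...
progressBar.progress("Importing results", 2);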

View File

@ -71,6 +71,7 @@ final class DataSourceIngestPipeline {
"IngestJob.progress.dataSourceIngest.displayName",
module.getDisplayName(), dataSource.getName());
this.job.updateDataSourceIngestProgressBarDisplayName(displayName);
this.job.switchDataSourceIngestProgressBarToIndeterminate();
ingestManager.setIngestTaskProgress(task, module.getDisplayName());
module.process(dataSource, new DataSourceIngestModuleProgress(this.job));
} catch (Exception ex) { // Catch-all exception firewall

View File

@ -42,22 +42,12 @@ import org.sleuthkit.datamodel.Content;
*/
final class IngestJob {
/**
* An ingest job may have multiple stages.
*/
private enum Stages {
/**
* High priority data source ingest modules and file ingest modules.
*/
FIRST,
/**
* Lower priority, usually long-running, data source ingest modules.
*/
SECOND
};
private static final Logger logger = Logger.getLogger(IngestJob.class.getName());
/**
* The task scheduler singleton is responsible for creating and scheduling
* the ingest tasks that make up ingest jobs.
*/
private static final IngestTasksScheduler taskScheduler = IngestTasksScheduler.getInstance();
/**
@ -69,34 +59,66 @@ final class IngestJob {
private static final ConcurrentHashMap<Long, IngestJob> jobsById = new ConcurrentHashMap<>();
/**
* These fields define the ingest job and the work it entails.
* These fields define the ingest job, including its ingest pipelines. Note
* that there is a collection of multiple copies of the file ingest
* pipeline, one for each file ingest thread.
*/
private final long id;
private final Content dataSource;
private final boolean processUnallocatedSpace;
private Stages stage;
private DataSourceIngestPipeline dataSourceIngestPipeline;
private final Object dataSourceIngestPipelineLock;
private DataSourceIngestPipeline firstStageDataSourceIngestPipeline;
private DataSourceIngestPipeline secondStageDataSourceIngestPipeline;
private DataSourceIngestPipeline currentDataSourceIngestPipeline;
private final LinkedBlockingQueue<FileIngestPipeline> fileIngestPipelines;
/**
* These fields are used to update ingest progress UI components for the
* job. The filesInProgress collection contains the names of the files that
* are in the file ingest pipelines and the two file counter fields are used
* to update the file ingest progress bar.
* An ingest runs in stages.
*/
private static enum Stages {
/**
* Setting up for processing.
*/
INITIALIZATION,
/**
* Running high priority data source level ingest modules and file level
* ingest modules.
*/
FIRST,
/**
* Running lower priority, usually long-running, data source level
* ingest modules.
*/
SECOND,
/**
* Cleaning up.
*/
FINALIZATION
};
private Stages stage;
private final Object stageCompletionCheckLock;
/**
* These fields are used to provide data source level task progress bars for
* the job.
*/
private ProgressHandle dataSourceIngestProgress;
private final Object dataSourceIngestProgressLock;
private ProgressHandle dataSourceIngestProgress;
/**
* These fields are used to provide file level ingest task progress bars for
* the job.
*/
private final Object fileIngestProgressLock;
private final List<String> filesInProgress;
private long estimatedFilesToProcess;
private long processedFiles;
private ProgressHandle fileIngestProgress;
private final Object fileIngestProgressLock;
/**
* These fields support cancellation of either the currently running data
* source ingest module or the entire ingest job.
* source level ingest module or the entire ingest job.
*/
private volatile boolean currentDataSourceIngestModuleCancelled;
private volatile boolean cancelled;
@ -159,7 +181,7 @@ final class IngestJob {
static List<IngestJobSnapshot> getJobSnapshots() {
List<IngestJobSnapshot> snapShots = new ArrayList<>();
for (IngestJob job : IngestJob.jobsById.values()) {
snapShots.add(job.getIngestJobSnapshot());
snapShots.add(job.getSnapshot());
}
return snapShots;
}
@ -185,11 +207,13 @@ final class IngestJob {
this.id = id;
this.dataSource = dataSource;
this.processUnallocatedSpace = processUnallocatedSpace;
this.stage = IngestJob.Stages.FIRST;
this.dataSourceIngestPipelineLock = new Object();
this.fileIngestPipelines = new LinkedBlockingQueue<>();
this.filesInProgress = new ArrayList<>();
this.dataSourceIngestProgressLock = new Object();
this.fileIngestProgressLock = new Object();
this.stage = IngestJob.Stages.INITIALIZATION;
this.stageCompletionCheckLock = new Object();
this.startTime = new Date().getTime();
}
@ -205,15 +229,15 @@ final class IngestJob {
/**
* Gets the data source to be ingested by this job.
*
* @return A reference to a Content object representing the data source.
* @return A Content object representing the data source.
*/
Content getDataSource() {
return this.dataSource;
}
/**
* Queries whether or not unallocated space should be processed as part of
* this job.
* Gets whether or not unallocated space should be processed as part of this
* job.
*
* @return True or false.
*/
@ -222,22 +246,30 @@ final class IngestJob {
}
/**
* Passes the data source for this job through a data source ingest
* pipeline.
* Passes the data source for this job through the currently active data
* source level ingest pipeline.
*
* @param task A data source ingest task wrapping the data source.
*/
void process(DataSourceIngestTask task) {
try {
if (!this.isCancelled() && !this.dataSourceIngestPipeline.isEmpty()) {
List<IngestModuleError> errors = new ArrayList<>();
errors.addAll(this.dataSourceIngestPipeline.process(task));
if (!errors.isEmpty()) {
logIngestModuleErrors(errors);
synchronized (this.dataSourceIngestPipelineLock) {
if (!this.isCancelled() && !this.currentDataSourceIngestPipeline.isEmpty()) {
/**
* Run the data source through the pipeline.
*/
List<IngestModuleError> errors = new ArrayList<>();
errors.addAll(this.currentDataSourceIngestPipeline.process(task));
if (!errors.isEmpty()) {
logIngestModuleErrors(errors);
}
}
}
// Shut down the data source ingest progress bar right away.
/**
* Shut down the data source ingest progress bar right away. Data
* source-level processing is finished for this stage.
*/
synchronized (this.dataSourceIngestProgressLock) {
if (null != this.dataSourceIngestProgress) {
this.dataSourceIngestProgress.finish();
@ -245,21 +277,22 @@ final class IngestJob {
}
}
} finally {
// No matter what happens, let the task scheduler know that this
// task is completed and check for job completion.
/**
* No matter what happens, do ingest task bookkeeping.
*/
IngestJob.taskScheduler.notifyTaskCompleted(task);
if (IngestJob.taskScheduler.tasksForJobAreCompleted(this)) {
this.handleTasksCompleted();
}
this.checkForStageCompleted();
}
}
/**
* Passes the a file from the data source for this job through the file
* Passes a file from the data source for this job through the file level
* ingest pipeline.
*
* @param task A file ingest task.
* @throws InterruptedException
* @throws InterruptedException if the thread executing this code is
* interrupted while blocked on taking from or putting to the file ingest
* pipelines collection.
*/
void process(FileIngestTask task) throws InterruptedException {
try {
@ -275,7 +308,9 @@ final class IngestJob {
*/
AbstractFile file = task.getFile();
// Update the file ingest progress bar.
/**
* Update the file ingest progress bar.
*/
synchronized (this.fileIngestProgressLock) {
++this.processedFiles;
if (this.processedFiles <= this.estimatedFilesToProcess) {
@ -286,15 +321,19 @@ final class IngestJob {
this.filesInProgress.add(file.getName());
}
// Run the file through the pipeline.
/**
* Run the file through the pipeline.
*/
List<IngestModuleError> errors = new ArrayList<>();
errors.addAll(pipeline.process(task));
if (!errors.isEmpty()) {
logIngestModuleErrors(errors);
}
// Update the file ingest progress bar again in case the
// file was being displayed.
/**
* Update the file ingest progress bar again, in case the
* file was being displayed.
*/
if (!this.cancelled) {
synchronized (this.fileIngestProgressLock) {
this.filesInProgress.remove(file.getName());
@ -307,27 +346,33 @@ final class IngestJob {
}
}
// Relinquish the pipeline so it can be reused by another file
// ingest thread.
/**
* Relinquish the pipeline so it can be reused by another file
* ingest thread.
*/
this.fileIngestPipelines.put(pipeline);
}
} finally {
// No matter what happens, let the task scheduler know that this
// task is completed and check for job completion.
/**
* No matter what happens, do ingest task bookkeeping.
*/
IngestJob.taskScheduler.notifyTaskCompleted(task);
if (IngestJob.taskScheduler.tasksForJobAreCompleted(this)) {
this.handleTasksCompleted();
}
this.checkForStageCompleted();
}
}
/**
* Adds more files to an ingest job, i.e., derived or carved files. Not
* Adds more files to an ingest job, i.e., extracted or carved files. Not
* currently supported for the second stage of the job.
*
* @param files A list of files to add.
*/
void addFiles(List<AbstractFile> files) {
/**
* Note: This implementation assumes that this is being called by an
* ingest module running code on an ingest thread that is holding a
* reference to an ingest task, so no task completion check is done.
*/
if (IngestJob.Stages.FIRST == this.stage) {
for (AbstractFile file : files) {
IngestJob.taskScheduler.scheduleFileIngestTask(this, file);
@ -335,10 +380,18 @@ final class IngestJob {
} else {
IngestJob.logger.log(Level.SEVERE, "Adding files during second stage not supported"); //NON-NLS
}
/**
* The intended clients of this method are ingest modules running code
* on an ingest thread that is holding a reference to an ingest task, in
* which case a task completion check would not be necessary. This is a
* bit of defensive programming.
*/
this.checkForStageCompleted();
}
/**
* Updates the display name of the data source ingest progress bar.
* Updates the display name of the data source level ingest progress bar.
*
* @param displayName The new display name.
*/
@ -351,8 +404,9 @@ final class IngestJob {
}
/**
* Switches the data source progress bar to determinate mode. This should be
* called if the total work units to process the data source is known.
* Switches the data source level ingest progress bar to determinate mode.
* This should be called if the total work units to process the data source
* is known.
*
* @param workUnits Total number of work units for the processing of the
* data source.
@ -368,9 +422,9 @@ final class IngestJob {
}
/**
* Switches the data source ingest progress bar to indeterminate mode. This
* should be called if the total work units to process the data source is
* unknown.
* Switches the data source level ingest progress bar to indeterminate mode.
* This should be called if the total work units to process the data source
* is unknown.
*/
void switchDataSourceIngestProgressBarToIndeterminate() {
if (!this.cancelled) {
@ -383,8 +437,8 @@ final class IngestJob {
}
/**
* Updates the data source ingest progress bar with the number of work units
* performed, if in the determinate mode.
* Updates the data source level ingest progress bar with the number of work
* units performed, if in the determinate mode.
*
* @param workUnits Number of work units performed.
*/
@ -399,39 +453,41 @@ final class IngestJob {
}
/**
* Updates the data source ingest progress bar display name.
* Updates the data source level ingest progress with a new task name, where
* the task name is the "subtitle" under the display name.
*
* @param displayName The new display name.
* @param currentTask The task name.
*/
void advanceDataSourceIngestProgressBar(String displayName) {
void advanceDataSourceIngestProgressBar(String currentTask) {
if (!this.cancelled) {
synchronized (this.dataSourceIngestProgressLock) {
if (null != this.dataSourceIngestProgress) {
this.dataSourceIngestProgress.progress(displayName);
this.dataSourceIngestProgress.progress(currentTask);
}
}
}
}
/**
* Updates the progress bar with the number of work units performed, if in
* the determinate mode.
* Updates the data source level ingest progress bar with a new task name
* and the number of work units performed, if in the determinate mode. The
* task name is the "subtitle" under the display name.
*
* @param message Message to display in sub-title
* @param currentTask The task name.
* @param workUnits Number of work units performed.
*/
void advanceDataSourceIngestProgressBar(String message, int workUnits) {
void advanceDataSourceIngestProgressBar(String currentTask, int workUnits) {
if (!this.cancelled) {
synchronized (this.dataSourceIngestProgressLock) {
this.dataSourceIngestProgress.progress(message, workUnits);
this.dataSourceIngestProgress.progress(currentTask, workUnits);
}
}
}
/**
* Determines whether or not a temporary cancellation of data source ingest
* in order to stop the currently executing data source ingest module is in
* effect.
* Queries whether or not a temporary cancellation of data source level
* ingest in order to stop the currently executing data source level ingest
* module is in effect.
*
* @return True or false.
*/
@ -440,17 +496,19 @@ final class IngestJob {
}
/**
* Rescind a temporary cancellation of data source ingest in order to stop
* the currently executing data source ingest module.
* Rescind a temporary cancellation of data source level ingest that was
* used to stop a single data source level ingest module.
*/
void currentDataSourceIngestModuleCancellationCompleted() {
this.currentDataSourceIngestModuleCancelled = false;
// A new progress bar must be created because the cancel button of the
// previously constructed component is disabled by NetBeans when the
// user selects the "OK" button of the cancellation confirmation dialog
// popped up by NetBeans when the progress bar cancel button was
// pressed.
/**
* A new progress bar must be created because the cancel button of the
* previously constructed component is disabled by NetBeans when the
* user selects the "OK" button of the cancellation confirmation dialog
* popped up by NetBeans when the progress bar cancel button was
* pressed.
*/
synchronized (this.dataSourceIngestProgressLock) {
this.dataSourceIngestProgress.finish();
this.dataSourceIngestProgress = null;
@ -459,12 +517,14 @@ final class IngestJob {
}
/**
* Requests cancellation of ingest, i.e., a shutdown of the data source and
* file ingest pipelines.
* Requests cancellation of ingest, i.e., a shutdown of the data source
* level and file level ingest pipelines.
*/
void cancel() {
// Put a cancellation message on data source ingest progress bar,
// if it is still running.
/**
* Put a cancellation message on data source level ingest progress bar,
* if it is still running.
*/
synchronized (this.dataSourceIngestProgressLock) {
if (dataSourceIngestProgress != null) {
final String displayName = NbBundle.getMessage(this.getClass(),
@ -477,8 +537,10 @@ final class IngestJob {
}
}
// Put a cancellation message on the file ingest progress bar,
// if it is still running.
/**
* Put a cancellation message on the file level ingest progress bar, if
* it is still running.
*/
synchronized (this.fileIngestProgressLock) {
if (this.fileIngestProgress != null) {
final String displayName = NbBundle.getMessage(this.getClass(),
@ -493,14 +555,16 @@ final class IngestJob {
this.cancelled = true;
/**
* Tell the task scheduler to cancel all pending tasks.
* Tell the task scheduler to cancel all pending tasks, i.e., tasks not
* being performed by an ingest thread.
*/
IngestJob.taskScheduler.cancelPendingTasksForIngestJob(this);
this.checkForStageCompleted();
}
/**
* Queries whether or not cancellation of ingest i.e., a shutdown of the
* data source and file ingest pipelines, has been requested
* data source level and file level ingest pipelines, has been requested.
*
* @return True or false.
*/
@ -508,85 +572,6 @@ final class IngestJob {
return this.cancelled;
}
/**
* Creates the file and data source ingest pipelines.
*
* @param ingestModuleTemplates Ingest module templates to use to populate
* the pipelines.
*/
private void createIngestPipelines(List<IngestModuleTemplate> ingestModuleTemplates) {
// Make mappings of ingest module factory class names to templates.
Map<String, IngestModuleTemplate> dataSourceModuleTemplates = new HashMap<>();
Map<String, IngestModuleTemplate> fileModuleTemplates = new HashMap<>();
for (IngestModuleTemplate template : ingestModuleTemplates) {
if (template.isDataSourceIngestModuleTemplate()) {
dataSourceModuleTemplates.put(template.getModuleFactory().getClass().getCanonicalName(), template);
}
if (template.isFileIngestModuleTemplate()) {
fileModuleTemplates.put(template.getModuleFactory().getClass().getCanonicalName(), template);
}
}
// Use the mappings and the ingest pipelines configuration to create
// ordered lists of ingest module templates for each ingest pipeline.
IngestPipelinesConfiguration pipelineConfigs = IngestPipelinesConfiguration.getInstance();
List<IngestModuleTemplate> firstStageDataSourceModuleTemplates = this.getConfiguredIngestModuleTemplates(dataSourceModuleTemplates, pipelineConfigs.getStageOneDataSourceIngestPipelineConfig());
List<IngestModuleTemplate> fileIngestModuleTemplates = this.getConfiguredIngestModuleTemplates(fileModuleTemplates, pipelineConfigs.getFileIngestPipelineConfig());
List<IngestModuleTemplate> secondStageDataSourceModuleTemplates = this.getConfiguredIngestModuleTemplates(dataSourceModuleTemplates, pipelineConfigs.getStageTwoDataSourceIngestPipelineConfig());
// Add any module templates that were not specified in the pipeline
// configurations to an appropriate pipeline - either the first stage
// data source ingest pipeline or the file ingest pipeline.
for (IngestModuleTemplate template : dataSourceModuleTemplates.values()) {
firstStageDataSourceModuleTemplates.add(template);
}
for (IngestModuleTemplate template : fileModuleTemplates.values()) {
fileIngestModuleTemplates.add(template);
}
// Contruct the data source ingest pipelines.
this.firstStageDataSourceIngestPipeline = new DataSourceIngestPipeline(this, firstStageDataSourceModuleTemplates);
this.secondStageDataSourceIngestPipeline = new DataSourceIngestPipeline(this, secondStageDataSourceModuleTemplates);
this.dataSourceIngestPipeline = firstStageDataSourceIngestPipeline;
// Construct the file ingest pipelines.
try {
int numberOfFileIngestThreads = IngestManager.getInstance().getNumberOfFileIngestThreads();
for (int i = 0; i < numberOfFileIngestThreads; ++i) {
this.fileIngestPipelines.put(new FileIngestPipeline(this, fileIngestModuleTemplates));
}
} catch (InterruptedException ex) {
/**
* The current thread was interrupted while blocked on a full queue.
* Blocking should never happen here, but reset the interrupted flag
* rather than just swallowing the exception.
*/
Thread.currentThread().interrupt();
}
}
/**
* Use an ordered list of ingest module factory class names to create an
* ordered subset of a collection ingest module templates. The ingest module
* templates are removed from the input collection as they are added to the
* output collection.
*
* @param ingestModuleTemplates A mapping of ingest module factory class
* names to ingest module templates.
* @param pipelineConfig An ordered list of ingest module factory class
* names representing an ingest pipeline.
* @return
*/
List<IngestModuleTemplate> getConfiguredIngestModuleTemplates(Map<String, IngestModuleTemplate> ingestModuleTemplates, List<String> pipelineConfig) {
List<IngestModuleTemplate> templates = new ArrayList<>();
for (String moduleClassName : pipelineConfig) {
if (ingestModuleTemplates.containsKey(moduleClassName)) {
templates.add(ingestModuleTemplates.remove(moduleClassName));
}
}
return templates;
}
/**
* Starts up the ingest pipelines and ingest progress bars.
*
@ -597,17 +582,102 @@ final class IngestJob {
List<IngestModuleError> errors = startUpIngestPipelines();
if (errors.isEmpty()) {
if (this.hasFirstStageDataSourceIngestPipeline() || this.hasFileIngestPipeline()) {
// There is at least one first stage pipeline.
this.startFirstStage();
} else if (this.hasSecondStageDataSourceIngestPipeline()) {
// There is no first stage pipeline, but there is a second stage
// ingest pipeline.
this.startSecondStage();
}
}
return errors;
}
/**
* Creates the file and data source ingest pipelines.
*
* @param ingestModuleTemplates Ingest module templates to use to populate
* the pipelines.
*/
private void createIngestPipelines(List<IngestModuleTemplate> ingestModuleTemplates) {
/**
* Make mappings of ingest module factory class names to templates.
*/
Map<String, IngestModuleTemplate> dataSourceModuleTemplates = new HashMap<>();
Map<String, IngestModuleTemplate> fileModuleTemplates = new HashMap<>();
for (IngestModuleTemplate template : ingestModuleTemplates) {
if (template.isDataSourceIngestModuleTemplate()) {
dataSourceModuleTemplates.put(template.getModuleFactory().getClass().getCanonicalName(), template);
}
if (template.isFileIngestModuleTemplate()) {
fileModuleTemplates.put(template.getModuleFactory().getClass().getCanonicalName(), template);
}
}
/**
* Use the mappings and the ingest pipelines configuration to create
* ordered lists of ingest module templates for each ingest pipeline.
*/
IngestPipelinesConfiguration pipelineConfigs = IngestPipelinesConfiguration.getInstance();
List<IngestModuleTemplate> firstStageDataSourceModuleTemplates = IngestJob.getConfiguredIngestModuleTemplates(dataSourceModuleTemplates, pipelineConfigs.getStageOneDataSourceIngestPipelineConfig());
List<IngestModuleTemplate> fileIngestModuleTemplates = IngestJob.getConfiguredIngestModuleTemplates(fileModuleTemplates, pipelineConfigs.getFileIngestPipelineConfig());
List<IngestModuleTemplate> secondStageDataSourceModuleTemplates = IngestJob.getConfiguredIngestModuleTemplates(dataSourceModuleTemplates, pipelineConfigs.getStageTwoDataSourceIngestPipelineConfig());
/**
* Add any module templates that were not specified in the pipelines
* configuration to an appropriate pipeline - either the first stage
* data source ingest pipeline or the file ingest pipeline.
*/
for (IngestModuleTemplate template : dataSourceModuleTemplates.values()) {
firstStageDataSourceModuleTemplates.add(template);
}
for (IngestModuleTemplate template : fileModuleTemplates.values()) {
fileIngestModuleTemplates.add(template);
}
/**
* Construct the data source ingest pipelines.
*/
this.firstStageDataSourceIngestPipeline = new DataSourceIngestPipeline(this, firstStageDataSourceModuleTemplates);
this.secondStageDataSourceIngestPipeline = new DataSourceIngestPipeline(this, secondStageDataSourceModuleTemplates);
/**
* Construct the file ingest pipelines, one per file ingest thread.
*/
try {
int numberOfFileIngestThreads = IngestManager.getInstance().getNumberOfFileIngestThreads();
for (int i = 0; i < numberOfFileIngestThreads; ++i) {
this.fileIngestPipelines.put(new FileIngestPipeline(this, fileIngestModuleTemplates));
}
} catch (InterruptedException ex) {
/**
* The current thread was interrupted while blocked on a full queue.
* Blocking should actually never happen here, but reset the
* interrupted flag rather than just swallowing the exception.
*/
Thread.currentThread().interrupt();
}
}
/**
* Use an ordered list of ingest module factory class names to create an
* ordered output list of ingest module templates for an ingest pipeline.
* The ingest module templates are removed from the input collection as they
* are added to the output collection.
*
* @param ingestModuleTemplates A mapping of ingest module factory class
* names to ingest module templates.
* @param pipelineConfig An ordered list of ingest module factory class
* names representing an ingest pipeline.
* @return An ordered list of ingest module templates for the pipeline.
*/
private static List<IngestModuleTemplate> getConfiguredIngestModuleTemplates(Map<String, IngestModuleTemplate> ingestModuleTemplates, List<String> pipelineConfig) {
List<IngestModuleTemplate> templates = new ArrayList<>();
for (String moduleClassName : pipelineConfig) {
if (ingestModuleTemplates.containsKey(moduleClassName)) {
templates.add(ingestModuleTemplates.remove(moduleClassName));
}
}
return templates;
}
/**
* Starts the first stage of the job.
*/
@ -615,7 +685,7 @@ final class IngestJob {
this.stage = IngestJob.Stages.FIRST;
/**
* Start one or both of the first stage progress bars.
* Start one or both of the first stage ingest progress bars.
*/
if (this.hasFirstStageDataSourceIngestPipeline()) {
this.startDataSourceIngestProgressBar();
@ -624,6 +694,14 @@ final class IngestJob {
this.startFileIngestProgressBar();
}
/**
* Make the first stage data source level ingest pipeline the current
* data source level pipeline.
*/
synchronized (this.dataSourceIngestPipelineLock) {
this.currentDataSourceIngestPipeline = this.firstStageDataSourceIngestPipeline;
}
/**
* Schedule the first stage tasks.
*/
@ -639,11 +717,9 @@ final class IngestJob {
* it is possible, if unlikely, that no file ingest tasks were
* actually scheduled since there are files that get filtered out by
* the tasks scheduler. In this special case, an ingest thread will
* never get to make the following check for this stage of the job.
* never get to check for completion of this stage of the job.
*/
if (IngestJob.taskScheduler.tasksForJobAreCompleted(this)) {
this.handleTasksCompleted();
}
this.checkForStageCompleted();
}
}
@ -653,7 +729,9 @@ final class IngestJob {
private void startSecondStage() {
this.stage = IngestJob.Stages.SECOND;
this.startDataSourceIngestProgressBar();
this.dataSourceIngestPipeline = this.secondStageDataSourceIngestPipeline;
synchronized (this.dataSourceIngestPipelineLock) {
this.currentDataSourceIngestPipeline = this.secondStageDataSourceIngestPipeline;
}
IngestJob.taskScheduler.scheduleDataSourceIngestTask(this);
}
@ -669,7 +747,8 @@ final class IngestJob {
}
/**
* Checks to see if this job has a first stage data source ingest pipeline.
* Checks to see if this job has a first stage data source level ingest
* pipeline.
*
* @return True or false.
*/
@ -678,7 +757,8 @@ final class IngestJob {
}
/**
* Checks to see if this job has a second stage data source ingest pipeline.
* Checks to see if this job has a second stage data source level ingest
* pipeline.
*
* @return True or false.
*/
@ -687,7 +767,7 @@ final class IngestJob {
}
/**
* Checks to see if the job has a file ingest pipeline.
* Checks to see if the job has a file level ingest pipeline.
*
* @return True or false.
*/
@ -696,8 +776,8 @@ final class IngestJob {
}
/**
* Starts up each of the file and data source ingest modules to collect
* possible errors.
* Starts up each of the file and data source level ingest modules to
* collect possible errors.
*
* @return A collection of ingest module startup errors, empty on success.
*/
@ -705,7 +785,7 @@ final class IngestJob {
List<IngestModuleError> errors = new ArrayList<>();
// Start up the first stage data source ingest pipeline.
errors.addAll(this.dataSourceIngestPipeline.startUp());
errors.addAll(this.firstStageDataSourceIngestPipeline.startUp());
// Start up the second stage data source ingest pipeline.
errors.addAll(this.secondStageDataSourceIngestPipeline.startUp());
@ -738,7 +818,7 @@ final class IngestJob {
}
/**
* Starts the data source ingest progress bar.
* Starts the data source level ingest progress bar.
*/
private void startDataSourceIngestProgressBar() {
synchronized (this.dataSourceIngestProgressLock) {
@ -771,7 +851,7 @@ final class IngestJob {
}
/**
* Starts the file ingest progress bar.
* Starts the file level ingest progress bar.
*/
private void startFileIngestProgressBar() {
synchronized (this.fileIngestProgressLock) {
@ -796,17 +876,21 @@ final class IngestJob {
}
/**
* Handles when all ingest tasks for this job are completed by finishing the
* current stage and possibly starting the next stage.
* Checks to see if the ingest tasks for the current stage are completed and
* does a stage transition if they are.
*/
private void handleTasksCompleted() {
switch (this.stage) {
case FIRST:
this.finishFirstStage();
break;
case SECOND:
this.finish();
break;
private void checkForStageCompleted() {
synchronized (this.stageCompletionCheckLock) {
if (IngestJob.taskScheduler.tasksForJobAreCompleted(this)) {
switch (this.stage) {
case FIRST:
this.finishFirstStage();
break;
case SECOND:
this.finish();
break;
}
}
}
}
@ -859,6 +943,8 @@ final class IngestJob {
* Shuts down the ingest pipelines and progress bars for this job.
*/
private void finish() {
this.stage = IngestJob.Stages.FINALIZATION;
// Finish the second stage data source ingest progress bar, if it hasn't
// already been finished.
synchronized (this.dataSourceIngestProgressLock) {
@ -890,8 +976,8 @@ final class IngestJob {
}
/**
* Requests a temporary cancellation of data source ingest in order to stop
* the currently executing data source ingest module.
* Requests a temporary cancellation of data source level ingest in order to
* stop the currently executing data source ingest module.
*/
private void cancelCurrentDataSourceIngestModule() {
this.currentDataSourceIngestModuleCancelled = true;
@ -902,8 +988,9 @@ final class IngestJob {
*
* @return An ingest job statistics object.
*/
private IngestJobSnapshot getIngestJobSnapshot() {
private IngestJobSnapshot getSnapshot() {
return new IngestJobSnapshot();
}
/**
@ -932,19 +1019,36 @@ final class IngestJob {
this.estimatedFilesToProcess = IngestJob.this.estimatedFilesToProcess;
this.snapShotTime = new Date().getTime();
}
/**
* Get a snapshot of the tasks currently in progress for this job.
*/
this.tasksSnapshot = IngestJob.taskScheduler.getTasksSnapshotForJob(this.jobId);
}
/**
* Gets the identifier of the ingest job that is the subject of this
* snapshot.
*
* @return The ingest job id.
*/
long getJobId() {
return this.jobId;
}
/**
* Gets the name of the data source associated with the ingest job that
* is the subject of this snapshot.
*
* @return A data source name string.
*/
String getDataSource() {
return dataSource;
}
/**
* Gets files per second throughput since job started.
* Gets files per second throughput since the ingest job that is the
* subject of this snapshot started.
*
* @return Files processed per second (approximate).
*/
@ -953,7 +1057,7 @@ final class IngestJob {
}
/**
* Gets the the ingest job was started.
* Gets the time the ingest job was started.
*
* @return The start time as number of milliseconds since January 1,
* 1970, 00:00:00 GMT.

View File

@ -30,6 +30,9 @@ import javax.swing.table.TableColumn;
import org.apache.commons.lang3.time.DurationFormatUtils;
import org.openide.util.NbBundle;
/**
* A panel that displays ingest task progress snapshots.
*/
public class IngestProgressSnapshotPanel extends javax.swing.JPanel {
private final JDialog parent;

View File

@ -16,7 +16,7 @@ Contains only the core ingest modules that ship with Autopsy -->
<MODULE>org.sleuthkit.autopsy.thunderbirdparser.EmailParserModuleFactory</MODULE>
<MODULE>org.sleuthkit.autopsy.modules.fileextmismatch.FileExtMismatchDetectorModuleFactory</MODULE>
<MODULE>org.sleuthkit.autopsy.modules.interestingitems.InterestingItemsIngestModuleFactory</MODULE>
<MODULE>org.sleuthkit.autopsy.modules.photoreccarver.PhotoRecCarverIngestModuleFactory</MODULE>
<MODULE>org.sleuthkit.autopsy.modules.photoreccarver.PhotoRecCarverIngestModuleFactory</MODULE>
</PIPELINE>
<PIPELINE type="ImageAnalysisStageTwo">

View File

@ -216,7 +216,7 @@ final class PhotoRecCarverFileIngestModule implements FileIngestModule {
PhotoRecCarverOutputParser parser = new PhotoRecCarverOutputParser(outputDirPath);
List<LayoutFile> theList = parser.parse(newAuditFile, id, file);
if (theList != null) { // if there were any results from carving, add the unallocated carving event to the reports list.
context.scheduleFiles(new ArrayList<>(theList));
context.addFilesToJob(new ArrayList<>(theList));
}
}
catch (IOException ex) {

View File

@ -181,7 +181,7 @@ public final class SevenZipIngestModule implements FileIngestModule {
//currently sending a single event for all new files
services.fireModuleContentEvent(new ModuleContentEvent(abstractFile));
context.scheduleFiles(unpackedFiles);
context.addFilesToJob(unpackedFiles);
}
return ProcessResult.OK;
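The scheduleFiles-to-addFilesToJob rename here, and in the Scalpel and Thunderbird modules below, keeps the calling pattern unchanged. A minimal sketch from a file ingest module, assuming the module has accumulated derived files; the variable names are illustrative:

// Hand newly created derived or carved files back to the ingest job so they
// flow through the file ingest pipeline.
List<AbstractFile> derivedFiles = new ArrayList<>();
// ... unpack or carve content, populating derivedFiles ...
if (!derivedFiles.isEmpty()) {
    context.addFilesToJob(derivedFiles);
}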

View File

@ -2,7 +2,7 @@
*
* Autopsy Forensic Browser
*
* Copyright 2013 Basis Technology Corp.
* Copyright 2013-2014 Basis Technology Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -48,7 +48,14 @@ public final class ReportBranding implements ReportBrandingProviderI {
private String reportsBrandingDir; //dir with extracted reports branding resources
private static final String MODULE_NAME = ReportBranding.class.getSimpleName();
private static final Logger logger = Logger.getLogger(ReportBranding.class.getName());
private static String generatorLogoPath;
// This is static so that it can be set by another object
// before the report is actually made. The entire class should
// probably become a singleton. It is set to null until
// setGeneratorLogoPath is called to specify something other than the default.
private static String generatorLogoPath = null;
private String defaultGeneratorLogoPath;
public ReportBranding() {
@ -64,7 +71,7 @@ public final class ReportBranding implements ReportBrandingProviderI {
//TODO use defaults
}
}
extractGeneratorLogo();
extractDefaultGeneratorLogo();
getAgencyLogoPath();
getReportTitle();
}
@ -74,29 +81,40 @@ public final class ReportBranding implements ReportBrandingProviderI {
return reportsBrandingDir;
}
private void extractGeneratorLogo() {
/**
* Extracts the default logo from the JAR file to a local file.
*/
private void extractDefaultGeneratorLogo() {
try {
PlatformUtil.extractResourceToUserConfigDir(getClass(), DEFAULT_GENERATOR_LOGO, true);
} catch (IOException ex) {
logger.log(Level.SEVERE, "Error extracting report branding resource for generator logo ", ex); //NON-NLS
}
generatorLogoPath = PlatformUtil.getUserConfigDirectory() + File.separator + DEFAULT_GENERATOR_LOGO;
defaultGeneratorLogoPath = PlatformUtil.getUserConfigDirectory() + File.separator + DEFAULT_GENERATOR_LOGO;
}
@Override
public String getGeneratorLogoPath() {
// if no one called to change the path, use default
if (generatorLogoPath == null)
generatorLogoPath = defaultGeneratorLogoPath;
return generatorLogoPath;
}
@Override
public void setGeneratorLogoPath(String path) {
generatorLogoPath = path;
}
@Override
public String getAgencyLogoPath() {
String curPath = null;
/* The agency logo code uses these properties to persist changes
* in the logo (within the same process).
* This is different from the generator logo that uses a static variable.
*/
curPath = ModuleSettings.getConfigSetting(MODULE_NAME, AGENCY_LOGO_PATH_PROP);
//if it has been set, validate that it is correct; if not set, return null
if (curPath != null && new File(curPath).canRead() == false) {
@ -110,6 +128,8 @@ public final class ReportBranding implements ReportBrandingProviderI {
@Override
public void setAgencyLogoPath(String path) {
// Use properties to persist the path of the logo to use.
// Should probably use a static variable instead.
ModuleSettings.setConfigSetting(MODULE_NAME, AGENCY_LOGO_PATH_PROP, path);
}
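A sketch of the intended use of the now-static generator logo path, assuming a caller customizes it before report generation; the path is a placeholder:

// Because generatorLogoPath is static, a path set through any instance is
// visible to the ReportBranding instance the report module creates later.
new ReportBranding().setGeneratorLogoPath("C:\\branding\\logo.png"); // placeholder path
// If no caller ever sets a path, getGeneratorLogoPath() falls back to the
// default logo extracted from the JAR.
String logoPath = new ReportBranding().getGeneratorLogoPath();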

View File

@ -247,8 +247,7 @@ public final class KeywordSearchIngestModule implements FileIngestModule {
return;
}
if (context.isJobCancelled()) {
logger.log(Level.INFO, "Ingest job cancelled"); //NON-NLS
if (context.fileIngestIsCancelled()) {
stop();
return;
}

View File

@ -128,7 +128,7 @@ class Chrome extends Extract {
continue;
}
File dbFile = new File(temps);
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
dbFile.delete();
break;
}
@ -204,7 +204,7 @@ class Chrome extends Extract {
logger.log(Level.INFO, "{0}- Now getting Bookmarks from {1}", new Object[]{moduleName, temps}); //NON-NLS
File dbFile = new File(temps);
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
dbFile.delete();
break;
}
@ -341,7 +341,7 @@ class Chrome extends Extract {
continue;
}
File dbFile = new File(temps);
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
dbFile.delete();
break;
}
@ -416,7 +416,7 @@ class Chrome extends Extract {
continue;
}
File dbFile = new File(temps);
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
dbFile.delete();
break;
}
@ -504,7 +504,7 @@ class Chrome extends Extract {
continue;
}
File dbFile = new File(temps);
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
dbFile.delete();
break;
}

View File

@ -52,6 +52,7 @@ import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProcessTerminator;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.datamodel.*;
@ -111,7 +112,7 @@ class ExtractIE extends Extract {
continue;
}
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
break;
}
@ -201,7 +202,7 @@ class ExtractIE extends Extract {
dataFound = true;
for (AbstractFile cookiesFile : cookiesFiles) {
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
break;
}
if (cookiesFile.getSize() == 0) {
@ -309,7 +310,7 @@ class ExtractIE extends Extract {
//indexFileName = "index" + Long.toString(bbart.getArtifactID()) + ".dat";
temps = RAImageIngestModule.getRATempPath(currentCase, "IE") + File.separator + indexFileName; //NON-NLS
File datFile = new File(temps);
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
break;
}
try {
@ -324,6 +325,9 @@ class ExtractIE extends Extract {
String filename = "pasco2Result." + indexFile.getId() + ".txt"; //NON-NLS
boolean bPascProcSuccess = executePasco(temps, filename);
if (context.dataSourceIngestIsCancelled()) {
return;
}
//At this point pasco2 has processed the index files.
//Now fetch the results, parse them and then delete the files.
@ -354,34 +358,26 @@ class ExtractIE extends Extract {
*/
private boolean executePasco(String indexFilePath, String outputFileName) {
boolean success = true;
Writer writer = null;
ExecUtil execPasco = new ExecUtil();
try {
final String outputFileFullPath = moduleTempResultsDir + File.separator + outputFileName;
logger.log(Level.INFO, "Writing pasco results to: {0}", outputFileFullPath); //NON-NLS
writer = new FileWriter(outputFileFullPath);
execPasco.execute(writer, JAVA_PATH,
"-cp", PASCO_LIB_PATH, //NON-NLS
"isi.pasco2.Main", "-T", "history", indexFilePath ); //NON-NLS
final String errFileFullPath = moduleTempResultsDir + File.separator + outputFileName + ".err";
logger.log(Level.INFO, "Writing pasco results to: {0}", outputFileFullPath); //NON-NLS
List<String> commandLine = new ArrayList<>();
commandLine.add(JAVA_PATH);
commandLine.add("-cp"); //NON-NLS
commandLine.add(PASCO_LIB_PATH);
commandLine.add("isi.pasco2.Main"); //NON-NLS
commandLine.add("-T"); //NON-NLS
commandLine.add("history"); //NON-NLS
commandLine.add(indexFilePath);
ProcessBuilder processBuilder = new ProcessBuilder(commandLine);
processBuilder.redirectOutput(new File(outputFileFullPath));
processBuilder.redirectError(new File(errFileFullPath));
ExecUtil.execute(processBuilder, new DataSourceIngestModuleProcessTerminator(context));
// @@@ Investigate use of history versus cache as type.
} catch (IOException ex) {
success = false;
logger.log(Level.SEVERE, "Unable to execute Pasco to process Internet Explorer web history.", ex); //NON-NLS
} catch (InterruptedException ex) {
success = false;
logger.log(Level.SEVERE, "Pasco has been interrupted, failed to extract some web history from Internet Explorer.", ex); //NON-NLS
}
finally {
if (writer != null) {
try {
writer.flush();
writer.close();
} catch (IOException ex) {
logger.log(Level.WARNING, "Error closing writer stream after for Pasco result", ex); //NON-NLS
}
}
execPasco.stop();
}
return success;
}

View File

@ -37,6 +37,7 @@ import org.sleuthkit.autopsy.coreutils.ExecUtil;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProcessTerminator;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.recentactivity.UsbDeviceIdMapper.USBInfo;
import org.sleuthkit.datamodel.*;
@ -61,15 +62,11 @@ class ExtractRegistry extends Extract {
private String RR_PATH;
private String RR_FULL_PATH;
private boolean rrFound = false; // true if we found the Autopsy-specific version of regripper
private boolean rrFullFound = false; // true if we found the full version of regripper
final private static String MODULE_VERSION = "1.0";
private boolean rrFullFound = false; // true if we found the full version of regripper
private Content dataSource;
private IngestJobContext context;
final private static UsbDeviceIdMapper usbMapper = new UsbDeviceIdMapper();
//hide public constructor to prevent from instantiation by ingest module loader
ExtractRegistry() {
moduleName = NbBundle.getMessage(ExtractIE.class, "ExtractRegistry.moduleName.text");
final File rrRoot = InstalledFileLocator.getDefault().locate("rr", ExtractRegistry.class.getPackage().getName(), false); //NON-NLS
@ -169,7 +166,7 @@ class ExtractRegistry extends Extract {
continue;
}
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
break;
}
@ -182,10 +179,9 @@ class ExtractRegistry extends Extract {
logger.log(Level.SEVERE, null, ex);
}
logger.log(Level.INFO, moduleName + "- Now getting registry information from " + regFileNameLocal); //NON-NLS
RegOutputFiles regOutputFiles = executeRegRip(regFileNameLocal, outputPathBase);
if (context.isJobCancelled()) {
logger.log(Level.INFO, "{0}- Now getting registry information from {1}", new Object[]{moduleName, regFileNameLocal}); //NON-NLS
RegOutputFiles regOutputFiles = ripRegistryFile(regFileNameLocal, outputPathBase);
if (context.dataSourceIngestIsCancelled()) {
break;
}
@ -268,9 +264,9 @@ class ExtractRegistry extends Extract {
* @param regFilePath Path to local copy of registry
* @param outFilePathBase Path to location to save output file to. Base name that will be extended on
*/
private RegOutputFiles executeRegRip(String regFilePath, String outFilePathBase) {
private RegOutputFiles ripRegistryFile(String regFilePath, String outFilePathBase) {
String autopsyType = ""; // Type argument for rr for autopsy-specific modules
String fullType = ""; // Type argument for rr for full set of modules
String fullType; // Type argument for rr for full set of modules
RegOutputFiles regOutputFiles = new RegOutputFiles();
@ -298,78 +294,44 @@ class ExtractRegistry extends Extract {
// run the autopsy-specific set of modules
if (!autopsyType.isEmpty() && rrFound) {
// TODO - add error messages
Writer writer = null;
ExecUtil execRR = null;
try {
regOutputFiles.autopsyPlugins = outFilePathBase + "-autopsy.txt"; //NON-NLS
logger.log(Level.INFO, "Writing RegRipper results to: " + regOutputFiles.autopsyPlugins); //NON-NLS
writer = new FileWriter(regOutputFiles.autopsyPlugins);
execRR = new ExecUtil();
execRR.execute(writer, RR_PATH,
"-r", regFilePath, "-f", autopsyType); //NON-NLS
} catch (IOException ex) {
logger.log(Level.SEVERE, "Unable to RegRipper and process parse some registry files.", ex); //NON-NLS
this.addErrorMessage(
NbBundle.getMessage(this.getClass(), "ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile",
this.getName()));
} catch (InterruptedException ex) {
logger.log(Level.SEVERE, "RegRipper has been interrupted, failed to parse registry.", ex); //NON-NLS
this.addErrorMessage(
NbBundle.getMessage(this.getClass(), "ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile2",
this.getName()));
} finally {
if (writer != null) {
try {
writer.close();
} catch (IOException ex) {
logger.log(Level.SEVERE, "Error closing output writer after running RegRipper", ex); //NON-NLS
}
}
if (execRR != null) {
execRR.stop();
}
}
regOutputFiles.autopsyPlugins = outFilePathBase + "-autopsy.txt"; //NON-NLS
String errFilePath = outFilePathBase + "-autopsy.err.txt"; //NON-NLS
logger.log(Level.INFO, "Writing RegRipper results to: {0}", regOutputFiles.autopsyPlugins); //NON-NLS
executeRegRipper(regFilePath, autopsyType, regOutputFiles.autopsyPlugins, errFilePath);
}
if (context.dataSourceIngestIsCancelled()) {
return regOutputFiles;
}
// run the full set of rr modules
if (!fullType.isEmpty() && rrFullFound) {
Writer writer = null;
ExecUtil execRR = null;
try {
regOutputFiles.fullPlugins = outFilePathBase + "-full.txt"; //NON-NLS
logger.log(Level.INFO, "Writing Full RegRipper results to: " + regOutputFiles.fullPlugins); //NON-NLS
writer = new FileWriter(regOutputFiles.fullPlugins);
execRR = new ExecUtil();
execRR.execute(writer, RR_FULL_PATH,
"-r", regFilePath, "-f", fullType); //NON-NLS
} catch (IOException ex) {
logger.log(Level.SEVERE, "Unable to run full RegRipper and process parse some registry files.", ex); //NON-NLS
this.addErrorMessage(
NbBundle.getMessage(this.getClass(), "ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile3",
this.getName()));
} catch (InterruptedException ex) {
logger.log(Level.SEVERE, "RegRipper full has been interrupted, failed to parse registry.", ex); //NON-NLS
this.addErrorMessage(
NbBundle.getMessage(this.getClass(), "ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile4",
this.getName()));
} finally {
if (writer != null) {
try {
writer.close();
} catch (IOException ex) {
logger.log(Level.SEVERE, "Error closing output writer after running RegRipper full", ex); //NON-NLS
}
}
if (execRR != null) {
execRR.stop();
}
}
}
regOutputFiles.fullPlugins = outFilePathBase + "-full.txt"; //NON-NLS
String errFilePath = outFilePathBase + "-full.err.txt"; //NON-NLS
logger.log(Level.INFO, "Writing Full RegRipper results to: {0}", regOutputFiles.fullPlugins); //NON-NLS
executeRegRipper(regFilePath, fullType, regOutputFiles.fullPlugins, errFilePath);
}
return regOutputFiles;
}
private void executeRegRipper(String hiveFilePath, String hiveFileType, String outputFile, String errFile) {
try {
logger.log(Level.INFO, "Writing RegRipper results to: {0}", outputFile); //NON-NLS
List<String> commandLine = new ArrayList<>();
commandLine.add(RR_PATH);
commandLine.add("-r"); //NON-NLS
commandLine.add(hiveFilePath);
commandLine.add("-f"); //NON-NLS
commandLine.add(hiveFileType);
ProcessBuilder processBuilder = new ProcessBuilder(commandLine);
processBuilder.redirectOutput(new File(outputFile));
processBuilder.redirectError(new File(errFile));
ExecUtil.execute(processBuilder, new DataSourceIngestModuleProcessTerminator(context));
} catch (IOException ex) {
logger.log(Level.SEVERE, "Unable to run RegRipper", ex); //NON-NLS
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile", this.getName()));
}
}
// @@@ VERIFY that we are doing the right thing when we parse multiple NTUSER.DAT
/**
*
@ -558,7 +520,7 @@ class ExtractRegistry extends Extract {
}
break;
default:
logger.log(Level.WARNING, "Unercognized node name: " + dataType);
logger.log(Level.WARNING, "Unrecognized node name: {0}", dataType);
break;
}
}

View File

@ -116,7 +116,7 @@ class Firefox extends Extract {
continue;
}
File dbFile = new File(temps);
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
dbFile.delete();
break;
}
@ -197,7 +197,7 @@ class Firefox extends Extract {
continue;
}
File dbFile = new File(temps);
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
dbFile.delete();
break;
}
@ -277,7 +277,7 @@ class Firefox extends Extract {
continue;
}
File dbFile = new File(temps);
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
dbFile.delete();
break;
}
@ -385,16 +385,16 @@ class Firefox extends Extract {
continue;
}
File dbFile = new File(temps);
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
dbFile.delete();
break;
}
List<HashMap<String, Object>> tempList = this.dbConnect(temps, downloadQuery);
logger.log(Level.INFO, moduleName + "- Now getting downloads from " + temps + " with " + tempList.size() + "artifacts identified."); //NON-NLS
logger.log(Level.INFO, "{0}- Now getting downloads from {1} with {2} artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); //NON-NLS
for (HashMap<String, Object> result : tempList) {
Collection<BlackboardAttribute> bbattributes = new ArrayList<BlackboardAttribute>();
Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(),
NbBundle.getMessage(this.getClass(),
@ -494,7 +494,7 @@ class Firefox extends Extract {
continue;
}
File dbFile = new File(temps);
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
dbFile.delete();
break;
}

View File

@ -93,7 +93,7 @@ public final class RAImageIngestModule implements DataSourceIngestModule {
for (int i = 0; i < extracters.size(); i++) {
Extract extracter = extracters.get(i);
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
logger.log(Level.INFO, "Recent Activity has been canceled, quitting before {0}", extracter.getName()); //NON-NLS
break;
}
@ -161,7 +161,7 @@ public final class RAImageIngestModule implements DataSourceIngestModule {
historyMsg.toString());
services.postMessage(inboxMsg);
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
return ProcessResult.OK;
}

View File

@ -79,7 +79,7 @@ class RecentDocumentsByLnk extends Extract {
dataFound = true;
for (AbstractFile recentFile : recentFiles) {
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
break;
}

View File

@ -278,7 +278,7 @@ class SearchEngineURLQueryAnalyzer extends Extract {
logger.log(Level.INFO, "Processing {0} blackboard artifacts.", listArtifacts.size()); //NON-NLS
for (BlackboardArtifact artifact : listArtifacts) {
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
break; //User cancelled the process.
}
@ -346,7 +346,7 @@ class SearchEngineURLQueryAnalyzer extends Extract {
} catch (TskCoreException e) {
logger.log(Level.SEVERE, "Encountered error retrieving artifacts for search engine queries", e); //NON-NLS
} finally {
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
logger.info("Operation terminated by user."); //NON-NLS
}
IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(

View File

@ -223,7 +223,7 @@ class ScalpelCarverIngestModule implements FileIngestModule {
}
// reschedule carved files
context.scheduleFiles(new ArrayList<AbstractFile>(carvedFiles));
context.addFilesToJob(new ArrayList<AbstractFile>(carvedFiles));
return ProcessResult.OK;
}

View File

@ -289,7 +289,7 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
services.fireModuleContentEvent(new ModuleContentEvent(derived));
}
}
context.scheduleFiles(derivedFiles);
context.addFilesToJob(derivedFiles);
services.fireModuleDataEvent(new ModuleDataEvent(EmailParserModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG));
}