From e99925fb7dd30832e1c7fc4cc46806a75e849065 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Mon, 17 Mar 2014 17:45:59 -0400 Subject: [PATCH] Check in current state of new ingest framework for safekeeping --- .../SampleDataSourceIngestModule.java | 182 +++---- .../examples/SampleFileIngestModule.java | 214 ++++---- .../ingest/DataSourceIngestModule.java | 7 +- .../DataSourceIngestModuleStatusHelper.java | 12 +- .../ingest/DataSourceIngestPipeline.java | 101 ---- .../autopsy/ingest/DataSourceIngestTask.java | 394 ++++++++++++++ .../autopsy/ingest/FileIngestModule.java | 4 +- .../autopsy/ingest/FileIngestPipeline.java | 100 ---- .../ingest/GetFilesContentVisitor.java | 5 +- .../autopsy/ingest/IngestConfigurator.java | 8 +- .../sleuthkit/autopsy/ingest/IngestJob.java | 131 ----- .../autopsy/ingest/IngestManager.java | 98 ++-- .../autopsy/ingest/IngestMessagePanel.java | 2 +- .../autopsy/ingest/IngestModule.java | 72 +-- .../autopsy/ingest/IngestModuleAdapter.java | 14 +- .../autopsy/ingest/IngestModuleContext.java | 22 +- .../autopsy/ingest/IngestModuleLoader.java | 39 +- .../autopsy/ingest/IngestModuleSettings.java | 1 + .../autopsy/ingest/IngestMonitor.java | 3 +- .../ingest/IngestPipelinesConfiguration.java | 129 +++++ .../autopsy/ingest/IngestScheduler.java | 42 +- .../autopsy/ingest/IngestServices.java | 64 +-- .../ExifParserFileIngestModule.java | 6 +- .../FileExtMismatchIngestModule.java | 3 +- .../hashdatabase/HashDbIngestModule.java | 4 +- .../KeywordSearchIngestModule.java | 4 +- .../KeywordSearchModuleFactory.java | 4 +- .../autopsy/recentactivity/Chrome.java | 14 +- .../autopsy/recentactivity/Extract.java | 38 +- .../autopsy/recentactivity/ExtractIE.java | 22 +- .../recentactivity/ExtractRegistry.java | 8 +- .../autopsy/recentactivity/ExtractUSB.java | 5 +- .../autopsy/recentactivity/Firefox.java | 3 +- .../recentactivity/RAImageIngestModule.java | 51 +- .../recentactivity/RecentDocumentsByLnk.java | 7 +- .../SearchEngineURLQueryAnalyzer.java | 37 
+- ScalpelCarver/nbproject/project.xml | 8 + .../scalpel/ScalpelCarverIngestModule.java | 480 ++++++++---------- .../scalpel/ScalpelCarverModuleFactory.java | 65 +++ .../sevenzip/SevenZipIngestModule.java | 6 +- .../ewfverify/EwfVerifyIngestModule.java | 33 +- .../EmailParserModuleFactory.java | 2 +- 42 files changed, 1261 insertions(+), 1183 deletions(-) delete mode 100755 Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestPipeline.java create mode 100644 Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestTask.java delete mode 100755 Core/src/org/sleuthkit/autopsy/ingest/FileIngestPipeline.java delete mode 100644 Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java create mode 100755 Core/src/org/sleuthkit/autopsy/ingest/IngestPipelinesConfiguration.java create mode 100755 ScalpelCarver/src/org/sleuthkit/autopsy/scalpel/ScalpelCarverModuleFactory.java diff --git a/Core/src/org/sleuthkit/autopsy/examples/SampleDataSourceIngestModule.java b/Core/src/org/sleuthkit/autopsy/examples/SampleDataSourceIngestModule.java index 99526afb8f..ef827b763e 100755 --- a/Core/src/org/sleuthkit/autopsy/examples/SampleDataSourceIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/examples/SampleDataSourceIngestModule.java @@ -1,108 +1,85 @@ /* -* Sample module in the public domain. Feel free to use this as a template -* for your modules. -* -* Contact: Brian Carrier [carrier sleuthkit [dot] org] -* -* This is free and unencumbered software released into the public domain. -* -* Anyone is free to copy, modify, publish, use, compile, sell, or -* distribute this software, either in source code form or as a compiled -* binary, for any purpose, commercial or non-commercial, and by any -* means. -* -* In jurisdictions that recognize copyright laws, the author or authors -* of this software dedicate any and all copyright interest in the -* software to the public domain. 
We make this dedication for the benefit -* of the public at large and to the detriment of our heirs and -* successors. We intend this dedication to be an overt act of -* relinquishment in perpetuity of all present and future rights to this -* software under copyright law. -* -* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR -* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, -* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -* OTHER DEALINGS IN THE SOFTWARE. -*/ - + * Sample module in the public domain. Feel free to use this as a template + * for your modules. + * + * Contact: Brian Carrier [carrier sleuthkit [dot] org] + * + * This is free and unencumbered software released into the public domain. + * + * Anyone is free to copy, modify, publish, use, compile, sell, or + * distribute this software, either in source code form or as a compiled + * binary, for any purpose, commercial or non-commercial, and by any + * means. + * + * In jurisdictions that recognize copyright laws, the author or authors + * of this software dedicate any and all copyright interest in the + * software to the public domain. We make this dedication for the benefit + * of the public at large and to the detriment of our heirs and + * successors. We intend this dedication to be an overt act of + * relinquishment in perpetuity of all present and future rights to this + * software under copyright law. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+ * IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR + * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + * OTHER DEALINGS IN THE SOFTWARE. + */ package org.sleuthkit.autopsy.examples; -// RJCTODO: Rework this module for the new interfaces -//import java.util.List; -//import org.apache.log4j.Logger; -//import org.openide.util.Exceptions; -//import org.sleuthkit.autopsy.casemodule.Case; -//import org.sleuthkit.autopsy.casemodule.services.FileManager; -//import org.sleuthkit.autopsy.casemodule.services.Services; -//import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleStatusHelper; -//import org.sleuthkit.autopsy.ingest.IngestModuleDataSource; -//import org.sleuthkit.autopsy.ingest.IngestModuleInit; -//import org.sleuthkit.datamodel.AbstractFile; -//import org.sleuthkit.datamodel.Content; -//import org.sleuthkit.datamodel.FsContent; -//import org.sleuthkit.datamodel.SleuthkitCase; -//import org.sleuthkit.datamodel.TskCoreException; +import java.util.List; +import org.apache.log4j.Logger; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.casemodule.services.FileManager; +import org.sleuthkit.autopsy.casemodule.services.Services; +import org.sleuthkit.autopsy.ingest.DataSourceIngestModule; +import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleStatusHelper; +import org.sleuthkit.autopsy.ingest.IngestModule; +import org.sleuthkit.autopsy.ingest.IngestModuleAdapter; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.FsContent; +import org.sleuthkit.datamodel.SleuthkitCase; +import org.sleuthkit.datamodel.TskCoreException; +// RJCTODO: Remove inheritance from IngestModuleAdapter and provide better documentation. /** - * Sample DataSource-level ingest module that doesn't do much at all. 
- * Just exists to show basic idea of these modules + * Sample data source ingest module that doesn't do much. Note that the + * IngestModuleAdapter abstract class could have been used as a base class to + * obtain default implementations of many of the DataSourceIngestModule methods. */ -// class SampleDataSourceIngestModule extends org.sleuthkit.autopsy.ingest.IngestModuleDataSource { -// -// /* Data Source modules operate on a disk or set of logical files. They -// * are passed in teh data source refernce and query it for things they want. -// */ -// @Override -// public void process(PipelineContext pipelineContext, Content dataSource, DataSourceIngestModuleStatusHelper controller) { -// -// Case case1 = Case.getCurrentCase(); -// SleuthkitCase sleuthkitCase = case1.getSleuthkitCase(); -// -// Services services = new Services(sleuthkitCase); -// FileManager fm = services.getFileManager(); -// try { -// /* you can use the findFiles method in FileManager (or similar ones in -// * SleuthkitCase to find files based only on their name. This -// * one finds files that have a .doc extension. */ -// List docFiles = fm.findFiles(dataSource, "%.doc"); -// for (AbstractFile file : docFiles) { -// // do something with each doc file -// } -// -// /* We can also do more general queries with findFilesWhere, which -// * allows us to make our own WHERE clause in the database. -// */ -// long currentTime = System.currentTimeMillis()/1000; -// // go back 2 weeks -// long minTime = currentTime - (14 * 24 * 60 * 60); -// List otherFiles = sleuthkitCase.findFilesWhere("crtime > " + minTime); -// // do something with these files... 
-// -// } catch (TskCoreException ex) { -// Logger log = Logger.getLogger(SampleDataSourceIngestModule.class); -// log.fatal("Error retrieving files from database: " + ex.getLocalizedMessage()); -// return; -// } -// } -// -// @Override -// public void init(IngestModuleInit initContext) { -// // do nothing -// } -// -// @Override -// public void complete() { -// // do nothing -// } -// -// @Override -// public void stop() { -// // do nothing -// } -// +// RJCTODO: Add service provider annotatin (commend out) +class SampleDataSourceIngestModule extends IngestModuleAdapter implements DataSourceIngestModule { + + private static final Logger logger = Logger.getLogger(SampleDataSourceIngestModule.class); + + @Override + public ResultCode process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper) { + Case case1 = Case.getCurrentCase(); + SleuthkitCase sleuthkitCase = case1.getSleuthkitCase(); + + Services services = new Services(sleuthkitCase); + FileManager fileManager = services.getFileManager(); + try { + List docFiles = fileManager.findFiles(dataSource, "%.doc"); + for (AbstractFile file : docFiles) { + // do something with each doc file + } + + long currentTime = System.currentTimeMillis() / 1000; + long minTime = currentTime - (14 * 24 * 60 * 60); // Go back two weeks. + List otherFiles = sleuthkitCase.findFilesWhere("crtime > " + minTime); + // do something with these files... 
+ + } catch (TskCoreException ex) { + logger.fatal("Error retrieving files from database: " + ex.getLocalizedMessage()); + return IngestModule.ResultCode.OK; + } + + return IngestModule.ResultCode.OK; + } // @Override // public String getName() { // return "SampleDataSourceIngestModule"; @@ -117,9 +94,4 @@ package org.sleuthkit.autopsy.examples; // public String getDescription() { // return "Doesn't do much"; // } -// -// @Override -// public boolean hasBackgroundJobsRunning() { -// return false; -// } -//} +} diff --git a/Core/src/org/sleuthkit/autopsy/examples/SampleFileIngestModule.java b/Core/src/org/sleuthkit/autopsy/examples/SampleFileIngestModule.java index 2db5eaa7e2..8466304646 100755 --- a/Core/src/org/sleuthkit/autopsy/examples/SampleFileIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/examples/SampleFileIngestModule.java @@ -30,17 +30,21 @@ package org.sleuthkit.autopsy.examples; -//import org.apache.log4j.Logger; -//import org.openide.util.Exceptions; -//import org.sleuthkit.autopsy.casemodule.Case; -//import org.sleuthkit.datamodel.AbstractFile; -//import org.sleuthkit.datamodel.BlackboardArtifact; -//import org.sleuthkit.datamodel.BlackboardAttribute; -//import org.sleuthkit.datamodel.TskCoreException; -//import org.sleuthkit.datamodel.SleuthkitCase; -//import org.sleuthkit.datamodel.TskData; +import org.apache.log4j.Logger; +import org.openide.util.Exceptions; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.ingest.FileIngestModule; +import org.sleuthkit.autopsy.ingest.IngestModule; +import org.sleuthkit.autopsy.ingest.IngestModuleAdapter; +import org.sleuthkit.autopsy.ingest.IngestModuleContext; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.BlackboardAttribute; +import org.sleuthkit.datamodel.TskCoreException; +import org.sleuthkit.datamodel.SleuthkitCase; +import org.sleuthkit.datamodel.TskData; -// RJCTODO +// RJCTODO: Remove 
inheritance from IngestModuleAdapter and provide better documentation. /** * This is a sample and simple module. It is a file-level ingest module, meaning * that it will get called on each file in the disk image / logical file set. @@ -51,108 +55,85 @@ package org.sleuthkit.autopsy.examples; * IngestModuleLoader will not load things from the org.sleuthkit.autopsy.examples package. * Either change the package or the loading code to make this module actually run. */ -// class SampleFileIngestModule extends org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile { -// private int attrId = -1; -// private static SampleFileIngestModule defaultInstance = null; -// -// // Private to ensure Singleton status -// private SampleFileIngestModule() { -// } -// -// // File-level ingest modules are currently singleton -- this is required -// public static synchronized SampleFileIngestModule getDefault() { -// //defaultInstance is a private static class variable -// if (defaultInstance == null) { -// defaultInstance = new SampleFileIngestModule(); -// } -// return defaultInstance; -// } -// -// -// @Override -// public void init(IngestModuleInit initContext) { -// /* For this demo, we are going to make a private attribute to post our -// * results to the blackbaord with. There are many standard blackboard artifact -// * and attribute types and you should first consider using one of those before -// * making private ones because other modules won't know about provate ones. -// * Because our demo has results that have no real value, we do not have an -// * official attribute for them. -// */ -// Case case1 = Case.getCurrentCase(); -// SleuthkitCase sleuthkitCase = case1.getSleuthkitCase(); -// -// // see if the type already exists in the blackboard. 
-// try { -// attrId = sleuthkitCase.getAttrTypeID("ATTR_SAMPLE"); -// } catch (TskCoreException ex) { -// // create it if not -// try { -// attrId = sleuthkitCase.addAttrType("ATTR_SAMPLE", "Sample Attribute"); -// } catch (TskCoreException ex1) { -// Logger log = Logger.getLogger(SampleFileIngestModule.class); -// log.fatal("Error adding attribute type: " + ex1.getLocalizedMessage()); -// attrId = -1; -// } -// } -// } -// -// @Override -// public ProcessResult process(PipelineContext pipelineContext, AbstractFile abstractFile) { -// // skip non-files -// if ((abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) || -// (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)) { -// return ProcessResult.OK; -// } -// -// // skip NSRL / known files -// if (abstractFile.getKnown() == TskData.FileKnown.KNOWN) { -// return ProcessResult.OK; -// } -// -// -// /* Do a non-sensical calculation of the number of 0x00 bytes -// * in the first 1024-bytes of the file. This is for demo -// * purposes only. -// */ -// try { -// byte buffer[] = new byte[1024]; -// int len = abstractFile.read(buffer, 0, 1024); -// int count = 0; -// for (int i = 0; i < len; i++) { -// if (buffer[i] == 0x00) { -// count++; -// } -// } -// -// if (attrId != -1) { -// // Make an attribute using the ID for the private type that we previously created. -// BlackboardAttribute attr = new BlackboardAttribute(attrId, getName(), count); -// -// /* add it to the general info artifact. In real modules, you would likely have -// * more complex data types and be making more specific artifacts. 
-// */ -// BlackboardArtifact art = abstractFile.getGenInfoArtifact(); -// art.addAttribute(attr); -// } -// -// return ProcessResult.OK; -// } catch (TskCoreException ex) { -// Exceptions.printStackTrace(ex); -// return ProcessResult.ERROR; -// } -// } -// -// -// @Override -// public void complete() { -// -// } -// -// @Override -// public void stop() { -// -// } -// + // RJCTODO: Add service provider annotatin (commend out) +class SampleFileIngestModule extends IngestModuleAdapter implements FileIngestModule { + private int attrId = -1; + + @Override + public void startUp(IngestModuleContext initContext) { + /* For this demo, we are going to make a private attribute to post our + * results to the blackbaord with. There are many standard blackboard artifact + * and attribute types and you should first consider using one of those before + * making private ones because other modules won't know about provate ones. + * Because our demo has results that have no real value, we do not have an + * official attribute for them. + */ + Case case1 = Case.getCurrentCase(); + SleuthkitCase sleuthkitCase = case1.getSleuthkitCase(); + + // see if the type already exists in the blackboard. 
+ try { + attrId = sleuthkitCase.getAttrTypeID("ATTR_SAMPLE"); + } catch (TskCoreException ex) { + // create it if not + try { + attrId = sleuthkitCase.addAttrType("ATTR_SAMPLE", "Sample Attribute"); + } catch (TskCoreException ex1) { + Logger log = Logger.getLogger(SampleFileIngestModule.class); + log.fatal("Error adding attribute type: " + ex1.getLocalizedMessage()); + attrId = -1; + } + } + } + + @Override + public IngestModule.ResultCode process(AbstractFile abstractFile) { + // skip non-files + if ((abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) || + (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)) { + return IngestModule.ResultCode.OK; + } + + // skip NSRL / known files + if (abstractFile.getKnown() == TskData.FileKnown.KNOWN) { + return IngestModule.ResultCode.OK; + } + + + /* Do a non-sensical calculation of the number of 0x00 bytes + * in the first 1024-bytes of the file. This is for demo + * purposes only. + */ + try { + byte buffer[] = new byte[1024]; + int len = abstractFile.read(buffer, 0, 1024); + int count = 0; + for (int i = 0; i < len; i++) { + if (buffer[i] == 0x00) { + count++; + } + } + + if (attrId != -1) { + // Make an attribute using the ID for the private type that we previously created. + BlackboardAttribute attr = new BlackboardAttribute(attrId, "SampleFileIngestModule", count); // RJCTODO: Set up with name as exmaple + + /* add it to the general info artifact. In real modules, you would likely have + * more complex data types and be making more specific artifacts. 
+ */ + BlackboardArtifact art = abstractFile.getGenInfoArtifact(); + art.addAttribute(attr); + } + + return IngestModule.ResultCode.OK; + } catch (TskCoreException ex) { + Exceptions.printStackTrace(ex); + return IngestModule.ResultCode.ERROR; + } + } + + // RJCTODO: Add a module factory + // @Override // public String getVersion() { // return "1.0"; @@ -167,10 +148,5 @@ package org.sleuthkit.autopsy.examples; // public String getDescription() { // return "Doesn't do much"; // } -// -// @Override -// public boolean hasBackgroundJobsRunning() { -// // we're single threaded... -// return false; -// } -//} + +} diff --git a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java index 39e93512e2..175ba77d0b 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java @@ -21,7 +21,10 @@ package org.sleuthkit.autopsy.ingest; import org.sleuthkit.datamodel.Content; /** - * Interface that must be implemented by all data source ingest modules. + * Interface that must be implemented by all data source ingest modules. Data + * source ingest modules work at the granularity of data sources, while file + * ingest modules work at the granularity of individual files from a data + * source. */ public interface DataSourceIngestModule extends IngestModule { @@ -30,7 +33,7 @@ public interface DataSourceIngestModule extends IngestModule { * * @param dataSource The data source to process. * @param statusHelper A status helper to be used to report progress and - * detect ingest job cancellation. + * detect cancellation. * @return A result code indicating success or failure of the processing. 
*/ ResultCode process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper); diff --git a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModuleStatusHelper.java b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModuleStatusHelper.java index e03121d47f..2cd2c16a32 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModuleStatusHelper.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModuleStatusHelper.java @@ -23,8 +23,8 @@ import org.netbeans.api.progress.ProgressHandle; import org.sleuthkit.datamodel.Content; /** - * Used by data source ingest modules to report progress and check for ingest - * job cancellation. + * Used by data source ingest modules to report progress and check for data + * source ingest task cancellation. */ public class DataSourceIngestModuleStatusHelper { @@ -40,12 +40,12 @@ public class DataSourceIngestModuleStatusHelper { /** * Checks for ingest job cancellation. This should be polled by the module - * in its process() method. If the ingest job is canceled, the module should - * return from its process() method as quickly as possible. + * in its process() method. If the ingest task is canceled, the module + * should return from its process() method as quickly as possible. * * @return True if the task has been canceled, false otherwise */ - public boolean isCanceled() { + public boolean isCancelled() { return worker.isCancelled(); } @@ -67,7 +67,7 @@ public class DataSourceIngestModuleStatusHelper { * Switches the progress bar to indeterminate mode. This should be called if * the total work units to process the data source is unknown. 
*/ - public void switchToInDeterminate() { + public void switchToIndeterminate() { if (progress != null) { progress.switchToIndeterminate(); } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestPipeline.java b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestPipeline.java deleted file mode 100755 index 2362c07baf..0000000000 --- a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestPipeline.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2014 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.ingest; - -import java.util.ArrayList; -import java.util.List; -import java.util.logging.Level; -import javax.swing.SwingWorker; -import org.netbeans.api.progress.ProgressHandle; -import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.datamodel.Content; - -/** - * A data source ingest pipeline composed of a sequence of data source ingest - * modules constructed from ingest module templates. The pipeline is specific to - * a single ingest job. 
- */ -class DataSourceIngestPipeline { - - private static final Logger logger = Logger.getLogger(DataSourceIngestPipeline.class.getName()); - private final IngestJob ingestJob; - private final List moduleTemplates; - private List modules = new ArrayList<>(); - - DataSourceIngestPipeline(IngestJob ingestJob, List moduleTemplates) { - this.ingestJob = ingestJob; - this.moduleTemplates = moduleTemplates; - } - - List startUp() throws Exception { - List errors = new ArrayList<>(); - for (IngestModuleTemplate template : moduleTemplates) { - IngestModuleFactory factory = template.getIngestModuleFactory(); - if (factory.isDataSourceIngestModuleFactory()) { - IngestModuleSettings ingestOptions = template.getIngestOptions(); - DataSourceIngestModule module = factory.createDataSourceIngestModule(ingestOptions); - IngestModuleContext context = new IngestModuleContext(this.ingestJob, factory); - try { - module.startUp(context); - this.modules.add(module); - IngestManager.fireModuleEvent(IngestManager.IngestModuleEvent.STARTED.toString(), factory.getModuleDisplayName()); - } catch (Exception ex) { - errors.add(new IngestModuleError(module.getContext().getModuleDisplayName(), ex)); - } - } - } - return errors; - } - - List ingestDataSource(SwingWorker worker, ProgressHandle progress) { - List errors = new ArrayList<>(); - Content dataSource = this.ingestJob.getDataSource(); - logger.log(Level.INFO, "Ingesting data source {0}", dataSource.getName()); - for (DataSourceIngestModule module : this.modules) { - try { - progress.start(); - progress.switchToIndeterminate(); - module.process(dataSource, new DataSourceIngestModuleStatusHelper(worker, progress, dataSource)); - progress.finish(); - } catch (Exception ex) { - errors.add(new IngestModuleError(module.getContext().getModuleDisplayName(), ex)); - } - IngestModuleContext context = module.getContext(); - if (context.isIngestJobCancelled()) { - break; - } - } - return errors; - } - - List shutDown(boolean ingestJobCancelled) { - 
List errors = new ArrayList<>(); - for (DataSourceIngestModule module : this.modules) { - try { - module.shutDown(ingestJobCancelled); - } catch (Exception ex) { - errors.add(new IngestModuleError(module.getContext().getModuleDisplayName(), ex)); - } finally { - IngestModuleContext context = module.getContext(); - IngestManager.fireModuleEvent(IngestManager.IngestModuleEvent.COMPLETED.toString(), context.getModuleDisplayName()); - } - } - return errors; - } -} diff --git a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestTask.java b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestTask.java new file mode 100644 index 0000000000..865c50a645 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestTask.java @@ -0,0 +1,394 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2014 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.ingest; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.logging.Level; +import javax.swing.SwingWorker; +import org.netbeans.api.progress.ProgressHandle; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.Content; + +/** + * Encapsulates a data source and the ingest module pipelines to be used to + * ingest the data source. 
+ */ +final class DataSourceIngestTask { + + private final long id; + private final Content dataSource; + private final List ingestModuleTemplates; + private final boolean processUnallocatedSpace; + private final HashMap fileIngestPipelines = new HashMap<>(); + private final HashMap dataSourceIngestPipelines = new HashMap<>(); + private FileIngestPipeline initialFileIngestPipeline = null; + private DataSourceIngestPipeline initialDataSourceIngestPipeline = null; + private boolean cancelled; + + DataSourceIngestTask(long id, Content dataSource, List ingestModuleTemplates, boolean processUnallocatedSpace) { + this.id = id; + this.dataSource = dataSource; + this.ingestModuleTemplates = ingestModuleTemplates; + this.processUnallocatedSpace = processUnallocatedSpace; + this.cancelled = false; + } + + long getId() { + return id; + } + + Content getDataSource() { + return dataSource; + } + + boolean shouldProcessUnallocatedSpace() { + return processUnallocatedSpace; + } + + synchronized void cancel() { + cancelled = true; + } + + synchronized boolean isCancelled() { + return cancelled; + } + + synchronized List startUpIngestPipelines() { + // Create a per thread instance of each pipeline type right now to make + // (reasonably) sure that the ingest modules can be started. 
+ initialDataSourceIngestPipeline = new DataSourceIngestPipeline(this, ingestModuleTemplates); + initialFileIngestPipeline = new FileIngestPipeline(this, ingestModuleTemplates); + List errors = new ArrayList<>(); + errors.addAll(initialDataSourceIngestPipeline.startUp()); + errors.addAll(initialFileIngestPipeline.startUp()); + return errors; + } + + synchronized DataSourceIngestPipeline getDataSourceIngestPipelineForThread(long threadId) { + DataSourceIngestPipeline pipeline; + if (initialDataSourceIngestPipeline != null) { + pipeline = initialDataSourceIngestPipeline; + initialDataSourceIngestPipeline = null; + dataSourceIngestPipelines.put(threadId, pipeline); + } else if (!dataSourceIngestPipelines.containsKey(threadId)) { + pipeline = new DataSourceIngestPipeline(this, ingestModuleTemplates); + pipeline.startUp(); // RJCTODO: If time permits, return possible errors with pipeline or some such thing + dataSourceIngestPipelines.put(threadId, pipeline); + } else { + pipeline = dataSourceIngestPipelines.get(threadId); + } + return pipeline; + } + + synchronized FileIngestPipeline getFileIngestPipelineForThread(long threadId) { + FileIngestPipeline pipeline; + if (initialFileIngestPipeline != null) { + pipeline = initialFileIngestPipeline; + initialFileIngestPipeline = null; + fileIngestPipelines.put(threadId, pipeline); + } else if (!fileIngestPipelines.containsKey(threadId)) { + pipeline = new FileIngestPipeline(this, ingestModuleTemplates); + pipeline.startUp(); + fileIngestPipelines.put(threadId, pipeline); + } else { + pipeline = fileIngestPipelines.get(threadId); + } + return pipeline; + } + + synchronized List releaseIngestPipelinesForThread(long threadId) { + List errors = new ArrayList<>(); + + DataSourceIngestPipeline dataSourceIngestPipeline = dataSourceIngestPipelines.get(threadId); + if (dataSourceIngestPipeline != null) { + errors.addAll(dataSourceIngestPipeline.shutDown(cancelled)); + } + this.dataSourceIngestPipelines.remove(threadId); + + 
FileIngestPipeline fileIngestPipeline = fileIngestPipelines.get(threadId); + if (fileIngestPipeline != null) { + errors.addAll(fileIngestPipeline.shutDown(cancelled)); + } + this.fileIngestPipelines.remove(threadId); + + return errors; + } + + synchronized boolean areIngestPipelinesShutDown() { + return (dataSourceIngestPipelines.isEmpty() && fileIngestPipelines.isEmpty()); + } + + // RJCTODO: Write story in JIRA for removing code dunplication + /** + * A data source ingest pipeline composed of a sequence of data source ingest + * modules constructed from ingest module templates. + */ + static final class DataSourceIngestPipeline { + + private static final Logger logger = Logger.getLogger(DataSourceIngestPipeline.class.getName()); + private final DataSourceIngestTask task; + private final List moduleTemplates; + private List modules = new ArrayList<>(); + + private DataSourceIngestPipeline(DataSourceIngestTask task, List moduleTemplates) { + this.task = task; + this.moduleTemplates = moduleTemplates; + } + + private List startUp() { + List errors = new ArrayList<>(); + + // Create an ingest module instance from each ingest module template + // that has an ingest module factory capable of making data source + // ingest modules. Map the module class names to the module instance + // to allow the modules to be put in the sequence indicated by the + // ingest pipelines configuration. 
+ Map modulesByClass = new HashMap<>(); + for (IngestModuleTemplate template : moduleTemplates) { + IngestModuleFactory factory = template.getIngestModuleFactory(); + if (factory.isDataSourceIngestModuleFactory()) { + IngestModuleSettings ingestOptions = template.getIngestOptions(); + DataSourceIngestModuleDecorator module = new DataSourceIngestModuleDecorator(factory.createDataSourceIngestModule(ingestOptions), factory.getModuleDisplayName()); + IngestModuleContext context = new IngestModuleContext(task, factory); + try { + module.startUp(context); + modulesByClass.put(module.getClass().getCanonicalName(), module); + IngestManager.fireModuleEvent(IngestManager.IngestModuleEvent.STARTED.toString(), factory.getModuleDisplayName()); + } catch (Exception ex) { + errors.add(new IngestModuleError(module.getDisplayName(), ex)); + } + } + } + + // Establish the module sequence of the core ingest modules + // indicated by the ingest pipeline configuration, adding any + // additional modules found in the global lookup to the end of the + // pipeline in arbitrary order. 
+ List pipelineConfig = IngestPipelinesConfiguration.getInstance().getDataSourceIngestPipelineConfig(); + for (String moduleClassName : pipelineConfig) { + if (modulesByClass.containsKey(moduleClassName)) { + modules.add(modulesByClass.remove(moduleClassName)); + } + } + for (DataSourceIngestModuleDecorator module : modulesByClass.values()) { + modules.add(module); + } + + return errors; + } + + List process(SwingWorker worker, ProgressHandle progress) { + List errors = new ArrayList<>(); + Content dataSource = this.task.getDataSource(); + logger.log(Level.INFO, "Processing data source {0}", dataSource.getName()); + for (DataSourceIngestModuleDecorator module : this.modules) { + try { + progress.start(); + progress.switchToIndeterminate(); + module.process(dataSource, new DataSourceIngestModuleStatusHelper(worker, progress, dataSource)); + progress.finish(); + } catch (Exception ex) { + errors.add(new IngestModuleError(module.getDisplayName(), ex)); + } + if (task.isCancelled()) { + break; + } + } + return errors; + } + + private List shutDown(boolean ingestJobCancelled) { + List errors = new ArrayList<>(); + for (DataSourceIngestModuleDecorator module : this.modules) { + try { + module.shutDown(ingestJobCancelled); + } catch (Exception ex) { + errors.add(new IngestModuleError(module.getDisplayName(), ex)); + } finally { + IngestManager.fireModuleEvent(IngestManager.IngestModuleEvent.COMPLETED.toString(), module.getDisplayName()); + } + } + return errors; + } + + private static class DataSourceIngestModuleDecorator implements DataSourceIngestModule { + + private final DataSourceIngestModule module; + private final String displayName; + + DataSourceIngestModuleDecorator(DataSourceIngestModule module, String displayName) { + this.module = module; + this.displayName = displayName; + } + + String getClassName() { + return module.getClass().getCanonicalName(); + } + + String getDisplayName() { + return displayName; + } + + @Override + public void 
startUp(IngestModuleContext context) throws Exception { + module.startUp(context); + } + + @Override + public IngestModule.ResultCode process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper) { + return module.process(dataSource, statusHelper); + } + + @Override + public void shutDown(boolean ingestJobWasCancelled) { + module.shutDown(ingestJobWasCancelled); + } + } + } + + /** + * A file ingest pipeline composed of a sequence of file ingest modules + * constructed from ingest module templates. + */ + static final class FileIngestPipeline { + + private static final Logger logger = Logger.getLogger(FileIngestPipeline.class.getName()); + private final DataSourceIngestTask task; + private final List moduleTemplates; + private List modules = new ArrayList<>(); + + private FileIngestPipeline(DataSourceIngestTask task, List moduleTemplates) { + this.task = task; + this.moduleTemplates = moduleTemplates; + } + + private List startUp() { + List errors = new ArrayList<>(); + + // Create an ingest module instance from each ingest module template + // that has an ingest module factory capable of making data source + // ingest modules. Map the module class names to the module instance + // to allow the modules to be put in the sequence indicated by the + // ingest pipelines configuration. 
+ Map modulesByClass = new HashMap<>(); + for (IngestModuleTemplate template : moduleTemplates) { + IngestModuleFactory factory = template.getIngestModuleFactory(); + if (factory.isFileIngestModuleFactory()) { + IngestModuleSettings ingestOptions = template.getIngestOptions(); + FileIngestModuleDecorator module = new FileIngestModuleDecorator(factory.createFileIngestModule(ingestOptions), factory.getModuleDisplayName()); + IngestModuleContext context = new IngestModuleContext(task, factory); + try { + module.startUp(context); + modulesByClass.put(module.getClass().getCanonicalName(), module); + IngestManager.fireModuleEvent(IngestManager.IngestModuleEvent.STARTED.toString(), factory.getModuleDisplayName()); + } catch (Exception ex) { + errors.add(new IngestModuleError(module.getDisplayName(), ex)); + } + } + } + + // Establish the module sequence of the core ingest modules + // indicated by the ingest pipeline configuration, adding any + // additional modules found in the global lookup to the end of the + // pipeline in arbitrary order. 
+ List pipelineConfig = IngestPipelinesConfiguration.getInstance().getFileIngestPipelineConfig(); + for (String moduleClassName : pipelineConfig) { + if (modulesByClass.containsKey(moduleClassName)) { + modules.add(modulesByClass.remove(moduleClassName)); + } + } + for (FileIngestModuleDecorator module : modulesByClass.values()) { + modules.add(module); + } + + return errors; + } + + List process(AbstractFile file) { + List errors = new ArrayList<>(); + Content dataSource = this.task.getDataSource(); + logger.log(Level.INFO, String.format("Processing %s from %s", file.getName(), dataSource.getName())); + for (FileIngestModuleDecorator module : this.modules) { + try { + module.process(file); + } catch (Exception ex) { + errors.add(new IngestModuleError(module.getDisplayName(), ex)); + } + if (task.isCancelled()) { + break; + } + } + file.close(); + IngestManager.fireFileDone(file.getId()); + return errors; + } + + private List shutDown(boolean ingestJobCancelled) { + List errors = new ArrayList<>(); + for (FileIngestModuleDecorator module : this.modules) { + try { + module.shutDown(ingestJobCancelled); + } catch (Exception ex) { + errors.add(new IngestModuleError(module.getDisplayName(), ex)); + } finally { + IngestManager.fireModuleEvent(IngestManager.IngestModuleEvent.COMPLETED.toString(), module.getDisplayName()); + } + } + return errors; + } + + private static class FileIngestModuleDecorator implements FileIngestModule { + + private final FileIngestModule module; + private final String displayName; + + FileIngestModuleDecorator(FileIngestModule module, String displayName) { + this.module = module; + this.displayName = displayName; + } + + String getClassName() { + return module.getClass().getCanonicalName(); + } + + String getDisplayName() { + return displayName; + } + + @Override + public void startUp(IngestModuleContext context) throws Exception { + module.startUp(context); + } + + @Override + public IngestModule.ResultCode process(AbstractFile file) { + 
return module.process(file); + } + + @Override + public void shutDown(boolean ingestJobWasCancelled) { + module.shutDown(ingestJobWasCancelled); + } + } + } +} diff --git a/Core/src/org/sleuthkit/autopsy/ingest/FileIngestModule.java b/Core/src/org/sleuthkit/autopsy/ingest/FileIngestModule.java index 159cdb0da8..d5f3939de6 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/FileIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/FileIngestModule.java @@ -21,7 +21,9 @@ package org.sleuthkit.autopsy.ingest; import org.sleuthkit.datamodel.AbstractFile; /** - * Interface that must be implemented by all file ingest modules. + * Interface that must be implemented by all file ingest modules. File ingest + * modules work at the granularity of individual files from a data source, while + * data source ingest modules work at the granularity of data sources. */ public interface FileIngestModule extends IngestModule { diff --git a/Core/src/org/sleuthkit/autopsy/ingest/FileIngestPipeline.java b/Core/src/org/sleuthkit/autopsy/ingest/FileIngestPipeline.java deleted file mode 100755 index f2663b9cd6..0000000000 --- a/Core/src/org/sleuthkit/autopsy/ingest/FileIngestPipeline.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2014 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.sleuthkit.autopsy.ingest; - -import java.util.ArrayList; -import java.util.List; -import java.util.logging.Level; -import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent; -import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.Content; - -/** - * A file ingest pipeline composed of a sequence of file ingest modules - * constructed from ingest module templates. The pipeline is specific to a - * single ingest job. - */ -class FileIngestPipeline { - - private static final Logger logger = Logger.getLogger(FileIngestPipeline.class.getName()); - private final IngestJob ingestJob; - private final List moduleTemplates; - private List modules = new ArrayList<>(); - - FileIngestPipeline(IngestJob ingestJob, List moduleTemplates) { - this.ingestJob = ingestJob; - this.moduleTemplates = moduleTemplates; - } - - List startUp() { - List errors = new ArrayList<>(); - for (IngestModuleTemplate template : moduleTemplates) { - IngestModuleFactory factory = template.getIngestModuleFactory(); - if (factory.isFileIngestModuleFactory()) { - IngestModuleSettings ingestOptions = template.getIngestOptions(); - FileIngestModule module = factory.createFileIngestModule(ingestOptions); - IngestModuleContext context = new IngestModuleContext(this.ingestJob, factory); - try { - module.startUp(context); - this.modules.add(module); - IngestManager.fireModuleEvent(IngestModuleEvent.STARTED.toString(), factory.getModuleDisplayName()); - } catch (Exception ex) { - errors.add(new IngestModuleError(module.getContext().getModuleDisplayName(), ex)); - } - } - } - return errors; - } - - List ingestFile(AbstractFile file) { - List errors = new ArrayList<>(); - Content dataSource = this.ingestJob.getDataSource(); - logger.log(Level.INFO, String.format("Ingesting {0} from {1}", file.getName(), dataSource.getName())); - for (FileIngestModule module : this.modules) { - try { - module.process(file); - } catch 
(Exception ex) { - errors.add(new IngestModuleError(module.getContext().getModuleDisplayName(), ex)); - } - IngestModuleContext context = module.getContext(); - if (context.isIngestJobCancelled()) { - break; - } - } - file.close(); - IngestManager.fireFileDone(file.getId()); - return errors; - } - - List shutDown(boolean ingestJobCancelled) { - List errors = new ArrayList<>(); - for (FileIngestModule module : this.modules) { - try { - module.shutDown(ingestJobCancelled); - } catch (Exception ex) { - errors.add(new IngestModuleError(module.getContext().getModuleDisplayName(), ex)); - } finally { - IngestModuleContext context = module.getContext(); - IngestManager.fireModuleEvent(IngestModuleEvent.COMPLETED.toString(), context.getModuleDisplayName()); - } - } - return errors; - } -} diff --git a/Core/src/org/sleuthkit/autopsy/ingest/GetFilesContentVisitor.java b/Core/src/org/sleuthkit/autopsy/ingest/GetFilesContentVisitor.java index 0e4cccd78a..43bca8ac6e 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/GetFilesContentVisitor.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/GetFilesContentVisitor.java @@ -33,9 +33,10 @@ import org.sleuthkit.datamodel.Volume; import org.sleuthkit.datamodel.VolumeSystem; /** - * Abstract visitor for getting all the files from content - * TODO should be moved to utility module (needs resolve cyclic deps) + * Abstract visitor for getting all the files from content. */ + // TODO Could be moved to utility package, is there another version of this +// somewhere? 
abstract class GetFilesContentVisitor implements ContentVisitor> { private static final Logger logger = Logger.getLogger(GetFilesContentVisitor.class.getName()); diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurator.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurator.java index be4c2d738a..64ef23ab01 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurator.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurator.java @@ -52,7 +52,7 @@ public class IngestConfigurator { // Get the ingest module factories discovered by the ingest module // loader. // RJCTODO: Put in name uniqueness test/solution in loader! - List moduleFactories = IngestModuleLoader.getDefault().getIngestModuleFactories(); + List moduleFactories = IngestModuleLoader.getInstance().getIngestModuleFactories(); HashSet loadedModuleNames = new HashSet<>(); for (IngestModuleFactory moduleFactory : moduleFactories) { loadedModuleNames.add(moduleFactory.getModuleDisplayName()); @@ -208,7 +208,8 @@ public class IngestConfigurator { csvList.append(list.get(list.size() - 1)); return csvList.toString(); } - + + // RJCTODO: May need additional mappings private HashSet getModulesNamesFromSetting(String key, String defaultSetting) { // Get the ingest modules setting from the user's config file. // If there is no such setting yet, create the default setting. 
@@ -227,8 +228,7 @@ public class IngestConfigurator { moduleNames.add("Email Parser"); break; case "File Extension Mismatch Detection": - case "Extension Mismatch Detector": - moduleNames.add("File Extension Mismatch Detector"); + moduleNames.add("Extension Mismatch Detector"); break; default: moduleNames.add(name); diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java deleted file mode 100644 index 61b6ddf17a..0000000000 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2014 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.ingest; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Objects; -import org.sleuthkit.datamodel.Content; - -/** - * Encapsulates a data source and the ingest module pipelines to be used to - * ingest the data source. 
- */ -class IngestJob { - - private final long id; - private final Content dataSource; - private final List ingestModuleTemplates; - private final boolean processUnallocatedSpace; - private final HashMap fileIngestPipelines = new HashMap<>(); - private final HashMap dataSourceIngestPipelines = new HashMap<>(); - private FileIngestPipeline initialFileIngestPipeline = null; - private DataSourceIngestPipeline initialDataSourceIngestPipeline = null; - private volatile boolean cancelled; - - IngestJob(long id, Content dataSource, List ingestModuleTemplates, boolean processUnallocatedSpace) { - this.id = id; - this.dataSource = dataSource; - this.ingestModuleTemplates = ingestModuleTemplates; - this.processUnallocatedSpace = processUnallocatedSpace; - this.cancelled = false; - } - - long getId() { - return id; - } - - Content getDataSource() { - return dataSource; - } - - boolean getProcessUnallocatedSpace() { - return this.processUnallocatedSpace; - } - - synchronized void cancel() { - this.cancelled = true; - } - - synchronized boolean isCancelled() { // RJCTODO: It seems like this is only used in the pipelines, where it no longer belongs, I think... - return this.cancelled; - } - - synchronized List startUpIngestPipelines() throws Exception { - // Create at least one instance of each pipeline type now to make - // reasonably sure the ingest modules can be started. 
- List errors = new ArrayList<>(); - errors.addAll(this.initialDataSourceIngestPipeline.startUp()); - errors.addAll(this.initialFileIngestPipeline.startUp()); - return errors; - } - - synchronized FileIngestPipeline getFileIngestPipelineForThread(long threadId) { - FileIngestPipeline pipeline; - if (null != this.initialFileIngestPipeline) { - pipeline = this.initialFileIngestPipeline; - this.initialDataSourceIngestPipeline = null; - fileIngestPipelines.put(threadId, pipeline); - } else if (!fileIngestPipelines.containsKey(threadId)) { - pipeline = new FileIngestPipeline(this, this.ingestModuleTemplates); - fileIngestPipelines.put(threadId, pipeline); - } else { - pipeline = fileIngestPipelines.get(threadId); - } - return pipeline; - } - - synchronized DataSourceIngestPipeline getDataSourceIngestPipelineForThread(long threadId) { - DataSourceIngestPipeline pipeline; - if (null != this.initialDataSourceIngestPipeline) { - pipeline = this.initialDataSourceIngestPipeline; - this.initialDataSourceIngestPipeline = null; - dataSourceIngestPipelines.put(threadId, pipeline); - } else if (!dataSourceIngestPipelines.containsKey(threadId)) { - pipeline = new DataSourceIngestPipeline(this, this.ingestModuleTemplates); - dataSourceIngestPipelines.put(threadId, pipeline); - } else { - pipeline = dataSourceIngestPipelines.get(threadId); - } - return pipeline; - } - - synchronized List releaseIngestPipelinesForThread(long threadId) { - List errors = new ArrayList<>(); - - DataSourceIngestPipeline dataSourceIngestPipeline = dataSourceIngestPipelines.get(threadId); - if (dataSourceIngestPipeline != null) { - errors.addAll(dataSourceIngestPipeline.shutDown(this.cancelled)); - } - this.dataSourceIngestPipelines.remove(threadId); - - FileIngestPipeline fileIngestPipeline = fileIngestPipelines.get(threadId); - if (fileIngestPipeline != null) { - errors.addAll(fileIngestPipeline.shutDown(this.cancelled)); - } - this.fileIngestPipelines.remove(threadId); - - return errors; - } - - 
synchronized boolean arePipelinesShutDown() { - return (dataSourceIngestPipelines.isEmpty() && fileIngestPipelines.isEmpty()); - } -} diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java index a6c5f85c26..e4abe0ea51 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java @@ -50,7 +50,7 @@ public class IngestManager { private static IngestManager instance; private final IngestScheduler scheduler; private final IngestMonitor ingestMonitor = new IngestMonitor(); - private final HashMap ingestJobs = new HashMap<>(); + private final HashMap ingestJobs = new HashMap<>(); private TaskSchedulingWorker taskSchedulingWorker; private FileTaskWorker fileTaskWorker; private DataSourceTaskWorker dataSourceTaskWorker; @@ -261,8 +261,8 @@ public class IngestManager { * @param pipelineContext ingest context used to ingest parent of the file * to be scheduled */ - void scheduleFileTask(long ingestJobId, AbstractFile file) { - IngestJob job = this.ingestJobs.get(ingestJobId); // RJCTODO: Consider renaming + void scheduleFileTask(long ingestJobId, AbstractFile file) { // RJCTODO: With the module context, this can be passed the task itself + DataSourceIngestTask job = this.ingestJobs.get(ingestJobId); // RJCTODO: Consider renaming if (job == null) { // RJCTODO: Handle severe error } @@ -303,35 +303,45 @@ public class IngestManager { } synchronized void reportThreadDone(long threadId) { - for (IngestJob job : ingestJobs.values()) { + for (DataSourceIngestTask job : ingestJobs.values()) { job.releaseIngestPipelinesForThread(threadId); // RJCTODO: Add logging of errors or send ingest messages - if (job.arePipelinesShutDown()) { + if (job.areIngestPipelinesShutDown()) { ingestJobs.remove(job.getId()); } } } synchronized void stopAll() { - for (IngestJob job : ingestJobs.values()) { - job.cancel(); - } - + // First get the task scheduling 
worker to stop. if (taskSchedulingWorker != null) { taskSchedulingWorker.cancel(true); + while (!taskSchedulingWorker.isDone()) { + // Wait. + // RJCTODO: Add sleep? + } taskSchedulingWorker = null; } - scheduler.getFileScheduler().empty(); - scheduler.getDataSourceScheduler().empty(); + // Now mark all of the ingest jobs as cancelled. This way the ingest + // modules will know they are being shut down due to cancellation when + // the ingest worker threads release their pipelines. + for (DataSourceIngestTask job : ingestJobs.values()) { + job.cancel(); + } + // Cancel the worker threads. if (dataSourceTaskWorker != null) { dataSourceTaskWorker.cancel(true); } - if (fileTaskWorker != null) { fileTaskWorker.cancel(true); } + + // Jettison the remaining tasks. This will dispose of any tasks that + // the scheduling worker queued up before it was cancelled. + scheduler.getFileScheduler().empty(); + scheduler.getDataSourceScheduler().empty(); } /** @@ -378,6 +388,7 @@ public class IngestManager { private final List dataSources; private final List moduleTemplates; private final boolean processUnallocatedSpace; + private final List scheduledJobIds = new ArrayList<>(); + private ProgressHandle progress; TaskSchedulingWorker(List dataSources, List moduleTemplates, boolean processUnallocatedSpace) { @@ -388,6 +399,8 @@ @Override protected Object doInBackground() throws Exception { + // Set up a progress bar that can be used to cancel all of the + // ingest jobs currently being performed. 
final String displayName = "Queueing ingest tasks"; progress = ProgressHandleFactory.createHandle(displayName, new Cancellable() { @Override @@ -396,20 +409,25 @@ public class IngestManager { if (progress != null) { progress.setDisplayName(displayName + " (Cancelling...)"); } - return TaskSchedulingWorker.this.cancel(true); + IngestManager.getDefault().stopAll(); + return true; } }); progress.start(2 * dataSources.size()); int processed = 0; for (Content dataSource : dataSources) { + if (isCancelled()) { + logger.log(Level.INFO, "Task scheduling thread cancelled"); + return null; + } + final String inputName = dataSource.getName(); - IngestJob ingestJob = new IngestJob(IngestManager.this.getNextDataSourceTaskId(), dataSource, moduleTemplates, processUnallocatedSpace); + DataSourceIngestTask ingestJob = new DataSourceIngestTask(IngestManager.this.getNextDataSourceTaskId(), dataSource, moduleTemplates, processUnallocatedSpace); List errors = ingestJob.startUpIngestPipelines(); if (!errors.isEmpty()) { - // RJCTODO: Log all errors. Provide a list of all of the modules - // that failed. + // RJCTODO: Log all errors, not just the first one. Provide a list of all of the modules that failed. MessageNotifyUtil.Message.error( "Failed to load " + errors.get(0).getModuleDisplayName() + " ingest module.\n\n" + "No ingest modules will be run. Please disable the module " @@ -423,13 +441,11 @@ public class IngestManager { ingestJobs.put(ingestJob.getId(), ingestJob); // Queue the data source ingest tasks for the ingest job. - logger.log(Level.INFO, "Queueing data source tasks: {0}", ingestJob); progress.progress("DataSource Ingest" + " " + inputName, processed); scheduler.getDataSourceScheduler().schedule(ingestJob); progress.progress("DataSource Ingest" + " " + inputName, ++processed); // Queue the file ingest tasks for the ingest job. 
- logger.log(Level.INFO, "Queuing file ingest tasks: {0}", ingestJob); progress.progress("File Ingest" + " " + inputName, processed); scheduler.getFileScheduler().scheduleIngestOfFiles(ingestJob); progress.progress("File Ingest" + " " + inputName, ++processed); @@ -443,25 +459,17 @@ public class IngestManager { try { super.get(); } catch (CancellationException | InterruptedException ex) { - handleInterruption(ex); + // IngestManager.stopAll() will dispose of all tasks. } catch (Exception ex) { - logger.log(Level.SEVERE, "Error while enqueing files. ", ex); - handleInterruption(ex); + logger.log(Level.SEVERE, "Error while scheduling ingest jobs", ex); + // RJCTODO: On EDT, report error, cannot dump all tasks since multiple data source tasks can be submitted. Would get partial results either way. } finally { - if (this.isCancelled()) { - handleInterruption(new Exception()); - } else { + if (!isCancelled()) { startAll(); } progress.finish(); } } - - private void handleInterruption(Exception ex) { - // RJCTODO: This seems broken, should empty only for current job? - scheduler.getFileScheduler().empty(); - scheduler.getDataSourceScheduler().empty(); - } } /** @@ -481,10 +489,9 @@ public class IngestManager { protected Void doInBackground() throws Exception { logger.log(Level.INFO, String.format("Data source ingest thread {0} started", this.id)); - // Set up a progress bar with cancel capability. This is one of two - // ways that the worker can be canceled. The other way is via a call - // to IngestManager.stopAll(). - final String displayName = "Data Source"; + // Set up a progress bar that can be used to cancel all of the + // ingest jobs currently being performed. 
+ final String displayName = "Data source ingest"; // RJCTODO: Need reset progress = ProgressHandleFactory.createHandle(displayName, new Cancellable() { @Override public boolean cancel() { @@ -492,7 +499,8 @@ public class IngestManager { if (progress != null) { progress.setDisplayName(displayName + " (Cancelling...)"); } - return DataSourceTaskWorker.this.cancel(true); + IngestManager.getDefault().stopAll(); + return true; } }); progress.start(); @@ -505,9 +513,9 @@ public class IngestManager { return null; } - IngestJob ingestJob = scheduler.next(); - DataSourceIngestPipeline pipeline = ingestJob.getDataSourceIngestPipelineForThread(this.id); - pipeline.ingestDataSource(this, this.progress); + DataSourceIngestTask ingestJob = scheduler.next(); + DataSourceIngestTask.DataSourceIngestPipeline pipeline = ingestJob.getDataSourceIngestPipelineForThread(this.id); + pipeline.process(this, this.progress); } logger.log(Level.INFO, "Data source ingest thread {0} completed", this.id); @@ -549,10 +557,9 @@ public class IngestManager { protected Object doInBackground() throws Exception { logger.log(Level.INFO, String.format("File ingest thread {0} started", this.id)); - // Set up a progress bar with cancel capability. This is one of two ways - // that the worker can be canceled. The other way is via a call to - // IngestManager.stopAll(). - final String displayName = "File Ingest"; + // Set up a progress bar that can be used to cancel all of the + // ingest jobs currently being performed. 
+ final String displayName = "File ingest"; progress = ProgressHandleFactory.createHandle(displayName, new Cancellable() { @Override public boolean cancel() { @@ -560,7 +567,8 @@ public class IngestManager { if (progress != null) { progress.setDisplayName(displayName + " (Cancelling...)"); } - return FileTaskWorker.this.cancel(true); + IngestManager.getDefault().stopAll(); + return true; } }); progress.start(); @@ -580,8 +588,8 @@ public class IngestManager { IngestScheduler.FileScheduler.FileTask task = fileScheduler.next(); AbstractFile file = task.getFile(); progress.progress(file.getName(), processedFiles); - FileIngestPipeline pipeline = task.getJob().getFileIngestPipelineForThread(this.id); - pipeline.ingestFile(file); + DataSourceIngestTask.FileIngestPipeline pipeline = task.getParent().getFileIngestPipelineForThread(this.id); + pipeline.process(file); // Update the progress bar. int newTotalEnqueuedFiles = fileScheduler.getFilesEnqueuedEst(); diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestMessagePanel.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestMessagePanel.java index b770127061..93d6b0447e 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestMessagePanel.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestMessagePanel.java @@ -365,7 +365,7 @@ class IngestMessagePanel extends JPanel implements TableModelListener { } private void init() { - List moduleFactories = IngestModuleLoader.getDefault().getIngestModuleFactories(); + List moduleFactories = IngestModuleLoader.getInstance().getIngestModuleFactories(); for (IngestModuleFactory factory : moduleFactories) { groupings.put(factory.getModuleDisplayName(), new HashMap>()); } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModule.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModule.java index 3ce388a2a0..787d769a8a 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestModule.java @@ -23,76 +23,14 @@ package 
org.sleuthkit.autopsy.ingest; */ public interface IngestModule { - public enum ResultCode { // RJCTODO: Do we really want to have this - + public enum ResultCode { OK, ERROR, - @Deprecated - NOT_SET }; - - // RJCTODO: Update - /** - * Invoked to allow an ingest module to set up internal data structures and - * acquire any private resources it will need during an ingest job. There - * will usually be more than one instance of a module working on an ingest - * job, but it is guaranteed that there will be no more than one instance of - * the module per thread. If these instances must share resources, the - * modules are responsible for synchronizing access to the shared resources - * and doing reference counting as required to release the resources - * correctly. - *

- * A module that uses the scheduling service to schedule additional - * processing needs to supply the ingest job ID passed to this method to the - * scheduler. For example, a module that extracts files from an archive file - * should schedule ingest of those files using the ingest job ID to ensure - * that the files will be processed as part of the same ingest job. - *

- * An ingest module that does not require initialization should extend the - * IngestModuleAdapter class to get a default implementation of this method - * that saves the ingest job id. - * - * @param ingestJobId Identifier for the ingest job. - * @param ingestOptions Module options for the ingest job. - */ - void startUp(IngestModuleContext context); - - /** - * RJCTODO - */ - IngestModuleContext getContext(); - // RJCTODO: Update - /** - * Invoked when an ingest job is completed, before the module instance is - * discarded. The module should respond by doing things like releasing - * private resources, submitting final results, and posting a final ingest - * message. - *

- * An ingest module that does not need to do anything when the ingest job - * completes should extend the IngestModuleAdapter class to get a default - * implementation of this method that does nothing. - */ - /** - * Invoked when an ingest job is canceled or otherwise terminated early, - * before the module instance is discarded. The module should respond by - * doing things like releasing private resources, discarding partial - * results, and posting a stopped ingest message. - *

- * An ingest module that does not need to do anything when the ingest job is - * canceled should extend the IngestModuleAdapter class to get a default - * implementation of this method that does nothing. - */ - /** - * Invoked after complete() or stop() is called to determine if the module - * has finished responding to the termination request. The module instance - * will be discarded when this method returns true. - *

- * An ingest module that does not need to do anything when the ingest job is - * completed or canceled should extend the IngestModuleAdapter class to get - * a default implementation of this method that returns true. - * - * @return True if the module is finished, false otherwise. - */ + // RJCTODO: Write header comment, make sure to mention "one module instance per thread" + void startUp(IngestModuleContext context) throws Exception; + + // RJCTODO: Write header comment, make sure to mention "one module instance per thread" void shutDown(boolean ingestJobWasCancelled); } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleAdapter.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleAdapter.java index 6a99efb641..e304457944 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleAdapter.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleAdapter.java @@ -24,23 +24,11 @@ package org.sleuthkit.autopsy.ingest; */ public abstract class IngestModuleAdapter implements IngestModule { - private IngestModuleContext context; - @Override - public void startUp(IngestModuleContext context) { - this.context = context; + public void startUp(IngestModuleContext context) throws Exception { } - @Override - public IngestModuleContext getContext() { - return context; - } - @Override public void shutDown(boolean ingestJobCancelled) { } - - protected void setContext(IngestModuleContext context) { - this.context = context; - } } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleContext.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleContext.java index e944f29b53..8d51e1a257 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleContext.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleContext.java @@ -34,14 +34,14 @@ import org.sleuthkit.datamodel.SleuthkitCase; */ public final class IngestModuleContext { - private final IngestJob ingestJob; + private final DataSourceIngestTask ingestJob; private final 
IngestModuleFactory moduleFactory; private final IngestManager ingestManager; private final IngestScheduler scheduler; private final Case autopsyCase; private final SleuthkitCase sleuthkitCase; - IngestModuleContext(IngestJob ingestJob, IngestModuleFactory moduleFactory) { + IngestModuleContext(DataSourceIngestTask ingestJob, IngestModuleFactory moduleFactory) { this.ingestJob = ingestJob; this.moduleFactory = moduleFactory; ingestManager = IngestManager.getDefault(); @@ -53,8 +53,8 @@ public final class IngestModuleContext { public boolean isIngestJobCancelled() { return this.ingestJob.isCancelled(); } - - /** + + /** * RJCTODO * * @return @@ -85,8 +85,8 @@ public final class IngestModuleContext { ingestManager.scheduleFileTask(ingestJob.getId(), file); } } - - public void postIngestMessage(long ID, IngestMessage.MessageType messageType, String subject, String detailsHtml) { + + public void postIngestMessage(long ID, IngestMessage.MessageType messageType, String subject, String detailsHtml) { IngestMessage message = IngestMessage.createMessage(ID, messageType, moduleFactory.getModuleDisplayName(), subject, detailsHtml); ingestManager.postIngestMessage(message); } @@ -125,18 +125,12 @@ public final class IngestModuleContext { public void logInfo(Class moduleClass, String message, Throwable ex) { Logger.getLogger(moduleClass.getName()).log(Level.INFO, message, ex); } - + public void logWarning(Class moduleClass, String message, Throwable ex) { Logger.getLogger(moduleClass.getName()).log(Level.WARNING, message, ex); } public void logError(Class moduleClass, String message, Throwable ex) { Logger.getLogger(moduleClass.getName()).log(Level.SEVERE, message, ex); - } - - // RJCTODO: Leave public or create blackboard attribute factory methods, - // perhaps as many as eleven. 
End goal is for this to be package - public String getModuleDisplayName() { - return this.moduleFactory.getModuleDisplayName(); - } + } } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleLoader.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleLoader.java index 33a0b811e6..507f4c4cc8 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleLoader.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleLoader.java @@ -16,7 +16,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.sleuthkit.autopsy.ingest; import java.util.ArrayList; @@ -26,38 +25,36 @@ import java.util.logging.Level; import org.openide.util.Lookup; import org.sleuthkit.autopsy.coreutils.Logger; -// RJCTODO: Comment +/** + * Looks up loaded ingest module factories using NetBeans global lookup. + */ final class IngestModuleLoader { + + private static final Logger logger = Logger.getLogger(IngestModuleLoader.class.getName()); private static IngestModuleLoader instance; - private ArrayList moduleFactories = new ArrayList<>(); + private final List moduleFactories = new ArrayList<>(); private IngestModuleLoader() { + lookUpIngestModuleFactories(); } - synchronized static IngestModuleLoader getDefault() { + synchronized static IngestModuleLoader getInstance() { if (instance == null) { - Logger.getLogger(IngestModuleLoader.class.getName()).log(Level.INFO, "Creating ingest module loader instance"); instance = new IngestModuleLoader(); - instance.init(); } return instance; } - private void init() { - // RJCTODO: Add code to listen to changes in the collections, possibly restore listener code... - // RJCTODO: Since we were going to overwrite pipeline config every time and we are going to move the code modules - // into this package, we can simply handle the module ordering here, possibly just directly instantiating the core - // modules. 
- // RJCTODO: Make sure that sample modules are excluded - Logger logger = Logger.getLogger(IngestModuleLoader.class.getName()); - Collection factories = Lookup.getDefault().lookupAll(IngestModuleFactory.class); - for (IngestModuleFactory factory : factories) { - logger.log(Level.INFO, "Loaded ingest module factory: name = {0}, version = {1}", new Object[]{factory.getModuleDisplayName(), factory.getModuleVersionNumber()}); - moduleFactories.add(factory); - } + List getIngestModuleFactories() { + return new ArrayList<>(moduleFactories); } - List getIngestModuleFactories() { - return moduleFactories; - } + private void lookUpIngestModuleFactories() { + // RJCTODO: Possibly add code to listen to changes in the collection and restore listener code... + Collection factories = Lookup.getDefault().lookupAll(IngestModuleFactory.class); + for (IngestModuleFactory factory : factories) { + logger.log(Level.INFO, "Found ingest module factory: name = {0}, version = {1}", new Object[]{factory.getModuleDisplayName(), factory.getModuleVersionNumber()}); + moduleFactories.add(factory); + } + } } \ No newline at end of file diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleSettings.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleSettings.java index a791d99623..6798436124 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleSettings.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleSettings.java @@ -27,6 +27,7 @@ import java.io.Serializable; */ public interface IngestModuleSettings extends Serializable { + // RJCTODO: Keep this as a shell if that works, otherwise go with Serializable /** * Determines whether the per ingest job options are valid. 
* diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestMonitor.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestMonitor.java index 92a02ca442..958cfe2434 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestMonitor.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestMonitor.java @@ -36,8 +36,9 @@ import org.sleuthkit.autopsy.coreutils.PlatformUtil; /** * Monitor health of the system and stop ingest if necessary */ -final class IngestMonitor { +public final class IngestMonitor { + public static final int DISK_FREE_SPACE_UNKNOWN = -1; // RJCTODO: This is ugly private static final int INITIAL_INTERVAL_MS = 60000; //1 min. private final Logger logger = Logger.getLogger(IngestMonitor.class.getName()); private Timer timer; diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestPipelinesConfiguration.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestPipelinesConfiguration.java new file mode 100755 index 0000000000..1224f91849 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestPipelinesConfiguration.java @@ -0,0 +1,129 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2012-2014 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.ingest; + +import java.io.File; +import java.util.ArrayList; +import java.util.List; +import java.util.logging.Level; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.coreutils.PlatformUtil; +import org.sleuthkit.autopsy.coreutils.XMLUtil; +import org.w3c.dom.Document; +import org.w3c.dom.Element; +import org.w3c.dom.NodeList; + +/** + * Provides data source and file ingest pipeline configurations as ordered lists + * of ingest module class names. The order of the module class names indicates + * the desired sequence of ingest module instances in an ingest modules + * pipeline. + */ +final class IngestPipelinesConfiguration { + + private static final Logger logger = Logger.getLogger(IngestPipelinesConfiguration.class.getName()); + private final static String PIPELINES_CONFIG_FILE = "pipeline_config.xml"; + private final static String PIPELINES_CONFIG_FILE_XSD = "PipelineConfigSchema.xsd"; + private static final String XML_PIPELINE_ELEM = "PIPELINE"; + private static final String XML_PIPELINE_TYPE_ATTR = "type"; + private final static String DATA_SOURCE_INGEST_PIPELINE_TYPE = "ImageAnalysis"; + private final static String FILE_INGEST_PIPELINE_TYPE = "FileAnalysis"; + private static final String XML_MODULE_ELEM = "MODULE"; + private static final String XML_MODULE_CLASS_NAME_ATTR = "location"; + private static IngestPipelinesConfiguration instance; + private final List dataSourceIngestPipelineConfig = new ArrayList<>(); + private final List fileIngestPipelineConfig = new ArrayList<>(); + + private IngestPipelinesConfiguration() { + readPipelinesConfigurationFile(); + } + + synchronized static IngestPipelinesConfiguration getInstance() { + if (instance == null) { + Logger.getLogger(IngestModuleLoader.class.getName()).log(Level.INFO, "Creating ingest module loader instance"); + instance = new IngestPipelinesConfiguration(); + } + return instance; + } + + List getDataSourceIngestPipelineConfig() { + return 
new ArrayList<>(dataSourceIngestPipelineConfig); + } + + List getFileIngestPipelineConfig() { + return new ArrayList<>(fileIngestPipelineConfig); + } + + private void readPipelinesConfigurationFile() { + String configFilePath = PlatformUtil.getUserConfigDirectory() + File.separator + PIPELINES_CONFIG_FILE; + Document doc = XMLUtil.loadDoc(IngestModuleLoader.class, configFilePath, PIPELINES_CONFIG_FILE_XSD); + if (doc == null) { + return; + } + + Element rootElement = doc.getDocumentElement(); + if (rootElement == null) { + logger.log(Level.SEVERE, "Invalid pipelines config file"); + return; + } + + NodeList pipelineElements = rootElement.getElementsByTagName(XML_PIPELINE_ELEM); + int numPipelines = pipelineElements.getLength(); + if (numPipelines < 1 || numPipelines > 2) { + logger.log(Level.SEVERE, "Invalid pipelines config file"); + return; + } + + List pipelineConfig = null; + for (int pipelineNum = 0; pipelineNum < numPipelines; ++pipelineNum) { + Element pipelineElement = (Element) pipelineElements.item(pipelineNum); + String pipelineTypeAttr = pipelineElement.getAttribute(XML_PIPELINE_TYPE_ATTR); + if (pipelineTypeAttr != null) { + switch (pipelineTypeAttr) { + case DATA_SOURCE_INGEST_PIPELINE_TYPE: + pipelineConfig = dataSourceIngestPipelineConfig; + break; + case FILE_INGEST_PIPELINE_TYPE: + pipelineConfig = fileIngestPipelineConfig; + break; + default: + logger.log(Level.SEVERE, "Invalid pipelines config file"); + return; + } + } + + // Create an ordered list of class names. The sequence of class + // names defines the sequence of modules in the pipeline. 
+ if (pipelineConfig != null) { + NodeList modulesElems = pipelineElement.getElementsByTagName(XML_MODULE_ELEM); + int numModules = modulesElems.getLength(); + if (numModules == 0) { + break; + } + for (int moduleNum = 0; moduleNum < numModules; ++moduleNum) { + Element moduleElement = (Element) modulesElems.item(moduleNum); + final String moduleClassName = moduleElement.getAttribute(XML_MODULE_CLASS_NAME_ATTR); + if (moduleClassName != null) { + pipelineConfig.add(moduleClassName); + } + } + } + } + } +} diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestScheduler.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestScheduler.java index 1a9287e7ad..b5bc08bafa 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestScheduler.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestScheduler.java @@ -148,7 +148,7 @@ final class IngestScheduler { return sb.toString(); } - synchronized void scheduleIngestOfFiles(IngestJob dataSourceTask) { + synchronized void scheduleIngestOfFiles(DataSourceIngestTask dataSourceTask) { // RJCTODO: This should go to the ingest manager as the job manager? // Save the data source task to manage its pipelines. //dataSourceTasks.put(dataSourceTask.getId(), dataSourceTask); @@ -212,7 +212,7 @@ final class IngestScheduler { * @param originalContext original content schedule context that was used * to schedule the parent origin content, with the modules, settings, etc. 
*/ - synchronized void scheduleIngestOfDerivedFile(IngestJob ingestJob, AbstractFile file) { + synchronized void scheduleIngestOfDerivedFile(DataSourceIngestTask ingestJob, AbstractFile file) { FileTask fileTask = new FileTask(file, ingestJob); if (shouldEnqueueTask(fileTask)) { fileTasks.addFirst(fileTask); @@ -338,7 +338,7 @@ final class IngestScheduler { for (Content c : children) { if (c instanceof AbstractFile) { AbstractFile childFile = (AbstractFile) c; - FileTask childTask = new FileTask(childFile, parentTask.getJob()); + FileTask childTask = new FileTask(childFile, parentTask.getParent()); if (childFile.hasChildren()) { this.directoryTasks.add(childTask); @@ -373,13 +373,13 @@ final class IngestScheduler { final Set contentSet = new HashSet<>(); for (FileTask task : rootDirectoryTasks) { - contentSet.add(task.getJob().getDataSource()); + contentSet.add(task.getParent().getDataSource()); } for (FileTask task : directoryTasks) { - contentSet.add(task.getJob().getDataSource()); + contentSet.add(task.getParent().getDataSource()); } for (FileTask task : fileTasks) { - contentSet.add(task.getJob().getDataSource()); + contentSet.add(task.getParent().getDataSource()); } return new ArrayList<>(contentSet); @@ -402,7 +402,7 @@ final class IngestScheduler { final AbstractFile aFile = processTask.file; //if it's unalloc file, skip if so scheduled - if (processTask.getJob().getProcessUnallocatedSpace() == false + if (processTask.getParent().shouldProcessUnallocatedSpace() == false && aFile.getType().equals(TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS //unalloc files )) { return false; @@ -462,15 +462,15 @@ final class IngestScheduler { */ static class FileTask { private final AbstractFile file; - private final IngestJob ingetsJob; + private final DataSourceIngestTask task; - public FileTask(AbstractFile file, IngestJob dataSourceTask) { + public FileTask(AbstractFile file, DataSourceIngestTask task) { this.file = file; - this.ingetsJob = dataSourceTask; + this.task = task; } 
- public IngestJob getJob() { - return ingetsJob; + public DataSourceIngestTask getParent() { // RJCTODO: Provide wrappers to get rid of train-style calls + return task; } public AbstractFile getFile() { @@ -509,8 +509,8 @@ final class IngestScheduler { if (this.file != other.file && (this.file == null || !this.file.equals(other.file))) { return false; } - IngestJob thisTask = this.getJob(); - IngestJob otherTask = other.getJob(); + DataSourceIngestTask thisTask = this.getParent(); + DataSourceIngestTask otherTask = other.getParent(); if (thisTask != otherTask && (thisTask == null || !thisTask.equals(otherTask))) { @@ -766,15 +766,15 @@ final class IngestScheduler { /** * DataSourceScheduler ingest scheduler */ - static class DataSourceScheduler implements Iterator { + static class DataSourceScheduler implements Iterator { - private LinkedList tasks; + private LinkedList tasks; DataSourceScheduler() { tasks = new LinkedList<>(); } - synchronized void schedule(IngestJob task) { + synchronized void schedule(DataSourceIngestTask task) { try { if (task.getDataSource().getParent() != null) { //only accepting parent-less content objects (Image, parentless VirtualDirectory) @@ -790,12 +790,12 @@ final class IngestScheduler { } @Override - public synchronized IngestJob next() throws IllegalStateException { + public synchronized DataSourceIngestTask next() throws IllegalStateException { if (!hasNext()) { throw new IllegalStateException("There is no data source tasks in the queue, check hasNext()"); } - final IngestJob ret = tasks.pollFirst(); + final DataSourceIngestTask ret = tasks.pollFirst(); return ret; } @@ -806,7 +806,7 @@ final class IngestScheduler { */ synchronized List getContents() { List contents = new ArrayList(); - for (IngestJob task : tasks) { + for (DataSourceIngestTask task : tasks) { contents.add(task.getDataSource()); } return contents; @@ -834,7 +834,7 @@ final class IngestScheduler { public String toString() { StringBuilder sb = new StringBuilder(); 
sb.append("DataSourceQueue, size: ").append(getCount()); - for (IngestJob task : tasks) { + for (DataSourceIngestTask task : tasks) { sb.append(task.toString()).append(" "); } return sb.toString(); diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestServices.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestServices.java index 2ea75b6afd..b318145b17 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestServices.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestServices.java @@ -18,10 +18,14 @@ */ package org.sleuthkit.autopsy.ingest; +import java.io.File; +import java.util.Collection; +import java.util.List; import java.util.logging.Level; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.SleuthkitCase; /** @@ -30,10 +34,10 @@ import org.sleuthkit.datamodel.SleuthkitCase; * singleton instance. */ public final class IngestServices { - public static final int DISK_FREE_SPACE_UNKNOWN = -1; + public static final int DISK_FREE_SPACE_UNKNOWN = -1; // RJCTODO: Move this back to the monitor or ingest manager? It is used here... + private static final Logger logger = Logger.getLogger(IngestServices.class.getName()); private IngestManager manager; - private Logger logger = Logger.getLogger(IngestServices.class.getName()); private static IngestServices instance; private IngestServices() { @@ -41,9 +45,9 @@ public final class IngestServices { } /** - * Get handle to singletone module services + * Get the ingest services. * - * @return the services handle + * @return The ingest services singleton. */ public static synchronized IngestServices getDefault() { if (instance == null) { @@ -53,34 +57,38 @@ public final class IngestServices { } /** - * Get access to the current Case handle. 
Note: When storing the Case - * database handle as a member variable in a module, this method needs to be - * called within the module's init() method and the member variable needs to - * be updated at each init(), to ensure the correct Case handle is being - * used if the Case is changed. + * Get the current Autopsy case. * - * @return current Case + * @return The current case. */ public Case getCurrentCase() { return Case.getCurrentCase(); } /** - * Get access to the current Case database handle. Like storing the Case - * handle, call this method and update member variables for each call to the - * module's init() method to ensure it is correct. + * Get the current SleuthKit case. The SleuthKit case is the case database. * - * @return current Case database + * @return The current case database. */ public SleuthkitCase getCurrentSleuthkitCaseDb() { return Case.getCurrentCase().getSleuthkitCase(); } + /** + * Get a logger that incorporates the display name of an ingest module in + * messages written to the Autopsy log files. + * + * @param moduleDisplayName The display name of the ingest module. + * @return The custom logger for the ingest module. + */ + public Logger getLogger(String moduleDisplayName) { + return Logger.getLogger(moduleDisplayName); + } + /** - * Post ingest message to the inbox. This should be done for analysis - * messages. + * Post message to the ingest messages inbox. * - * @param message ingest message to be posted by ingest module + * @param message An ingest message */ public void postMessage(final IngestMessage message) { manager.postIngestMessage(message); } @@ -120,7 +128,7 @@ public final class IngestServices { IngestManager.fireModuleContentEvent(moduleContentEvent); } - // RJCTODO: This can stay in the context since it is context (pipeline) specific /** * Schedule a new file for ingest with the same settings as the file being * analyzed. This is used, for example, when opening an archive file. 
File @@ -142,21 +150,5 @@ public final class IngestServices { */ public long getFreeDiskSpace() { return manager.getFreeDiskSpace(); - } - - // RJCTODO - /** - * Facility for a file ingest module to check a return value from a - * previously run file ingest module that executed for the same file. The - * module return value can be used as a guideline to skip processing the - * file - * - * @param moduleName registered module name of the module to check the - * return value of - * @return the return value of the previously executed module for the - * currently processed file in the file ingest pipeline - */ -// public IngestModule.ResultCode getAbstractFileModuleResult(String moduleName) { -// return manager.getAbstractFileModuleResult(moduleName); -// } -} + } + } diff --git a/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserFileIngestModule.java b/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserFileIngestModule.java index 9acc3dccdb..ff63438a5d 100644 --- a/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserFileIngestModule.java +++ b/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserFileIngestModule.java @@ -38,7 +38,6 @@ import org.sleuthkit.autopsy.coreutils.ImageUtils; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.IngestModuleAdapter; import org.sleuthkit.autopsy.ingest.FileIngestModule; -import org.sleuthkit.autopsy.ingest.IngestModuleTempApiShim; import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.datamodel.AbstractFile; @@ -55,7 +54,7 @@ import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM; * files. Ingests an image file and, if available, adds it's date, latitude, * longitude, altitude, device model, and device make to a blackboard artifact. 
*/ -public final class ExifParserFileIngestModule extends IngestModuleAdapter implements FileIngestModule, IngestModuleTempApiShim { +public final class ExifParserFileIngestModule extends IngestModuleAdapter implements FileIngestModule { private IngestServices services; private static final Logger logger = Logger.getLogger(ExifParserFileIngestModule.class.getName()); @@ -66,7 +65,8 @@ public final class ExifParserFileIngestModule extends IngestModuleAdapter implem } @Override - public void startUp(org.sleuthkit.autopsy.ingest.IngestModuleContext context) { + public void startUp(org.sleuthkit.autopsy.ingest.IngestModuleContext context) throws Exception { + super.startUp(context); services = IngestServices.getDefault(); logger.log(Level.INFO, "init() {0}", this.toString()); filesProcessed = 0; diff --git a/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/FileExtMismatchIngestModule.java b/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/FileExtMismatchIngestModule.java index ec5825073f..5abbdabb50 100644 --- a/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/FileExtMismatchIngestModule.java +++ b/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/FileExtMismatchIngestModule.java @@ -58,7 +58,8 @@ public class FileExtMismatchIngestModule extends IngestModuleAdapter implements } @Override - public void startUp(org.sleuthkit.autopsy.ingest.IngestModuleContext context) { + public void startUp(org.sleuthkit.autopsy.ingest.IngestModuleContext context) throws Exception { + super.startUp(context); services = IngestServices.getDefault(); FileExtMismatchXML xmlLoader = FileExtMismatchXML.getDefault(); SigTypeToExtMap = xmlLoader.load(); diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java index 576bc70a56..966e2c9890 100644 --- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java +++ 
b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java @@ -44,6 +44,7 @@ import org.sleuthkit.autopsy.ingest.IngestModuleAdapter; import org.sleuthkit.autopsy.ingest.FileIngestModule; import org.sleuthkit.datamodel.HashInfo; +// RJCTODO: Stories for a) persisting context-sensitive module settings and b) adapt core modules to use module settings (more important) public class HashDbIngestModule extends IngestModuleAdapter implements FileIngestModule { private static final Logger logger = Logger.getLogger(HashDbIngestModule.class.getName()); private static final int MAX_COMMENT_SIZE = 500; @@ -62,7 +63,8 @@ public class HashDbIngestModule extends IngestModuleAdapter implements FileInges } @Override - public void startUp(org.sleuthkit.autopsy.ingest.IngestModuleContext context) { + public void startUp(org.sleuthkit.autopsy.ingest.IngestModuleContext context) throws Exception { + super.startUp(context); services = IngestServices.getDefault(); skCase = Case.getCurrentCase().getSleuthkitCase(); diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java index bef6beed0b..1883010dc4 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java @@ -298,8 +298,8 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme * */ @Override - public void startUp(IngestModuleContext context) { - setContext(context); + public void startUp(IngestModuleContext context) throws Exception { + super.startUp(context); logger.log(Level.INFO, "init()"); services = IngestServices.getDefault(); initialized = false; diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchModuleFactory.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchModuleFactory.java 
index 30ec4e1ee4..1e7a6d6c3b 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchModuleFactory.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchModuleFactory.java @@ -19,8 +19,6 @@ package org.sleuthkit.autopsy.keywordsearch; -import java.io.Serializable; -import javax.swing.JPanel; import org.openide.util.NbBundle; import org.openide.util.lookup.ServiceProvider; import org.sleuthkit.autopsy.coreutils.Version; @@ -48,7 +46,7 @@ public class KeywordSearchModuleFactory extends IngestModuleFactoryAdapter { @Override public String getModuleDescription() { - return NbBundle.getMessage(KeywordSearchIngestModule.class, "HashDbInKeywordSearchIngestModulegestModule.moduleDescription"); + return NbBundle.getMessage(KeywordSearchIngestModule.class, "KeywordSearchIngestModule.moduleDescription"); } @Override diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java index 2c2aa49b0b..5f623f0da2 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java @@ -54,26 +54,26 @@ import org.sleuthkit.datamodel.TskData; */ class Chrome extends Extract { - private static final Logger logger = Logger.getLogger(Chrome.class.getName()); private static final String historyQuery = "SELECT urls.url, urls.title, urls.visit_count, urls.typed_count, " + "last_visit_time, urls.hidden, visits.visit_time, (SELECT urls.url FROM urls WHERE urls.id=visits.url) as from_visit, visits.transition FROM urls, visits WHERE urls.id = visits.url"; private static final String cookieQuery = "select name, value, host_key, expires_utc,last_access_utc, creation_utc from cookies"; private static final String downloadQuery = "select full_path, url, start_time, received_bytes from downloads"; private static final String downloadQueryVersion30 = "SELECT current_path as 
full_path, url, start_time, received_bytes FROM downloads, downloads_url_chains WHERE downloads.id=downloads_url_chains.id"; private static final String loginQuery = "select origin_url, username_value, signon_realm from logins"; + private final Logger logger = Logger.getLogger(this.getClass().getName()); Chrome() { moduleName = "Chrome"; } @Override - public void extractRecentActivity(Content dataSource, DataSourceIngestModuleStatusHelper controller) { + public void process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper) { dataFound = false; - this.getHistory(dataSource, controller); - this.getBookmark(dataSource, controller); - this.getCookie(dataSource, controller); - this.getLogin(dataSource, controller); - this.getDownload(dataSource, controller); + this.getHistory(dataSource, statusHelper); + this.getBookmark(dataSource, statusHelper); + this.getCookie(dataSource, statusHelper); + this.getLogin(dataSource, statusHelper); + this.getDownload(dataSource, statusHelper); } /** diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java index ac59e6809c..4dfe565d38 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java @@ -35,19 +35,26 @@ import org.sleuthkit.datamodel.*; abstract class Extract { - private static final Logger logger = Logger.getLogger(Extract.class.getName()); - protected Case currentCase = Case.getCurrentCase(); // RJCTODO: Fix this - protected SleuthkitCase tskCase = currentCase.getSleuthkitCase(); // RJCTODO: Fix this + protected Case currentCase = Case.getCurrentCase(); + protected SleuthkitCase tskCase = currentCase.getSleuthkitCase(); + public final Logger logger = Logger.getLogger(this.getClass().getName()); private final ArrayList errorMessages = new ArrayList<>(); - String moduleName = ""; // RJCTODO: Fix this - boolean 
dataFound = false; // RJCTODO: Fix this + String moduleName = ""; + boolean dataFound = false; Extract() { - dataFound = false; } - // RJCTODO: Consider renaming - abstract void extractRecentActivity(Content dataSource, DataSourceIngestModuleStatusHelper controller); + void init() throws Exception { + } + + abstract void process(Content dataSource, DataSourceIngestModuleStatusHelper controller); + + void complete() { + } + + void stop() { + } /** * Returns a List of string error messages from the inheriting class @@ -82,7 +89,7 @@ abstract class Extract { BlackboardArtifact bbart = content.newArtifact(type); bbart.addAttributes(bbattributes); } catch (TskException ex) { - logger.log(Level.SEVERE, "Error while trying to add an artifact: {0}", ex); // RJCTODO: Add extracter name + logger.log(Level.SEVERE, "Error while trying to add an artifact: {0}", ex); } } @@ -138,26 +145,15 @@ abstract class Extract { return list; } - void complete() { - } - - void stop() { - } - /** * Returns the name of the inheriting class * * @return Gets the moduleName set in the moduleName data member */ - protected String getName() { // RJCTODO: Fix this + protected String getName() { return moduleName; } - // RJCTODO: Wire this in. 
- protected void setDataFound(boolean foundData) { - dataFound = foundData; - } - public boolean foundData() { return dataFound; } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java index fac795479d..a53c9b611b 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java @@ -65,7 +65,7 @@ import org.sleuthkit.datamodel.*; */ class ExtractIE extends Extract { private static final Logger logger = Logger.getLogger(ExtractIE.class.getName()); - private IngestServices services; + private IngestServices services = IngestServices.getDefault(); private String moduleTempResultsDir; private String PASCO_LIB_PATH; private String JAVA_PATH; @@ -79,7 +79,7 @@ class ExtractIE extends Extract { } @Override - public void extractRecentActivity(Content dataSource, DataSourceIngestModuleStatusHelper controller) { + public void process(Content dataSource, DataSourceIngestModuleStatusHelper controller) { dataFound = false; this.getBookmark(dataSource, controller); this.getCookie(dataSource, controller); @@ -93,7 +93,7 @@ class ExtractIE extends Extract { */ private void getBookmark(Content dataSource, DataSourceIngestModuleStatusHelper controller) { org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); - List favoritesFiles = null; + List favoritesFiles; try { favoritesFiles = fileManager.findFiles(dataSource, "%.url", "Favorites"); } catch (TskCoreException ex) { @@ -125,7 +125,7 @@ class ExtractIE extends Extract { datetime = Long.valueOf(Tempdate); String domain = Util.extractDomain(url); - Collection bbattributes = new ArrayList(); + Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", url)); bbattributes.add(new 
BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TITLE.getTypeID(), "RecentActivity", name)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID(), "RecentActivity", datetime)); @@ -172,7 +172,7 @@ class ExtractIE extends Extract { */ private void getCookie(Content dataSource, DataSourceIngestModuleStatusHelper controller) { org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); - List cookiesFiles = null; + List cookiesFiles; try { cookiesFiles = fileManager.findFiles(dataSource, "%.txt", "Cookies"); } catch (TskCoreException ex) { @@ -213,7 +213,7 @@ class ExtractIE extends Extract { datetime = Long.valueOf(tempDate); String domain = Util.extractDomain(url); - Collection bbattributes = new ArrayList(); + Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", url)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", datetime)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", (name != null) ? 
name : "")); @@ -462,19 +462,11 @@ class ExtractIE extends Extract { fileScanner.close(); } -// @Override -// public void init() { -// services = IngestServices.getDefault(); -// } - @Override public void stop() { if (execPasco != null) { execPasco.stop(); execPasco = null; - } - - //call regular cleanup from complete() method - complete(); + } } } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java index 29e2672d66..c829cfbabf 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java @@ -101,12 +101,6 @@ class ExtractRegistry extends Extract { RR_FULL_PATH = "perl " + rrFullHome + File.separator + "rip.pl"; } } - -// @Override -// public String getVersion() { -// return MODULE_VERSION; -// } - /** * Search for the registry hives on the system. @@ -532,7 +526,7 @@ class ExtractRegistry extends Extract { } @Override - public void extractRecentActivity(Content dataSource, DataSourceIngestModuleStatusHelper controller) { + public void process(Content dataSource, DataSourceIngestModuleStatusHelper controller) { analyzeRegistryFiles(dataSource, controller); } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractUSB.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractUSB.java index 41c605df4c..b9a40d6407 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractUSB.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractUSB.java @@ -2,7 +2,7 @@ * * Autopsy Forensic Browser * - * Copyright 2012 Basis Technology Corp. + * Copyright 2012-2014 Basis Technology Corp. * * Copyright 2012 42six Solutions. 
* Contact: aebadirad 42six com @@ -89,7 +89,7 @@ class ExtractUSB { * @throws IOException */ private void loadDeviceMap() throws FileNotFoundException, IOException { - devices = new HashMap(); + devices = new HashMap<>(); PlatformUtil.extractResourceToUserConfigDir(this.getClass(), DataFile); try (Scanner dat = new Scanner(new FileInputStream(new java.io.File(PlatformUtil.getUserConfigDirectory() + File.separator + "USB_DATA.txt")))) { String line = dat.nextLine(); @@ -152,6 +152,7 @@ class ExtractUSB { return product; } + @Override public String toString() { return vendor + product; } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java index 1f8c759c88..71ec153ea8 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java @@ -50,7 +50,6 @@ import org.sleuthkit.datamodel.TskCoreException; */ class Firefox extends Extract { - private static final Logger logger = Logger.getLogger(Firefox.class.getName()); private static final String historyQuery = "SELECT moz_historyvisits.id,url,title,visit_count,(visit_date/1000000) as visit_date,from_visit,(SELECT url FROM moz_places WHERE id=moz_historyvisits.from_visit) as ref FROM moz_places, moz_historyvisits WHERE moz_places.id = moz_historyvisits.place_id AND hidden = 0"; private static final String cookieQuery = "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed,(creationTime/1000000) as creationTime FROM moz_cookies"; private static final String cookieQueryV3 = "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed FROM moz_cookies"; @@ -63,7 +62,7 @@ class Firefox extends Extract { } @Override - public void extractRecentActivity(Content dataSource, DataSourceIngestModuleStatusHelper controller) { + public void process(Content dataSource, DataSourceIngestModuleStatusHelper controller) { 
dataFound = false; this.getHistory(dataSource, controller); this.getBookmark(dataSource, controller); diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java index 08ae6bce1b..c55c6c054c 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java @@ -43,18 +43,22 @@ import org.sleuthkit.autopsy.ingest.IngestModuleAdapter; public final class RAImageIngestModule extends IngestModuleAdapter implements DataSourceIngestModule { private static final Logger logger = Logger.getLogger(RAImageIngestModule.class.getName()); - private IngestServices services; private static int messageId = 0; + private final List extracters = new ArrayList<>(); + private final List browserExtracters = new ArrayList<>(); + private IngestServices services; private StringBuilder subCompleted = new StringBuilder(); - private ArrayList extracters; - private List browserExtracters; RAImageIngestModule() { } + synchronized int getNextMessageId() { + return ++messageId; + } + @Override public ResultCode process(Content dataSource, DataSourceIngestModuleStatusHelper controller) { - services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, RecentActivityExtracterModuleFactory.getModuleName(), "Started " + dataSource.getName())); + services.postMessage(IngestMessage.createMessage(getNextMessageId(), MessageType.INFO, RecentActivityExtracterModuleFactory.getModuleName(), "Started " + dataSource.getName())); controller.switchToDeterminate(extracters.size()); controller.progress(0); @@ -68,7 +72,7 @@ public final class RAImageIngestModule extends IngestModuleAdapter implements Da } try { - extracter.extractRecentActivity(dataSource, controller); + extracter.process(dataSource, controller); } catch (Exception ex) { logger.log(Level.SEVERE, 
"Exception occurred in " + extracter.getName(), ex); subCompleted.append(extracter.getName()).append(" failed - see log for details
"); @@ -99,7 +103,7 @@ public final class RAImageIngestModule extends IngestModuleAdapter implements Da errorMessage.append("

No errors encountered.

"); errorMsgSubject = "No errors reported"; } - final IngestMessage msg = IngestMessage.createMessage(++messageId, msgLevel, RecentActivityExtracterModuleFactory.getModuleName(), "Finished " + dataSource.getName() + " - " + errorMsgSubject, errorMessage.toString()); + final IngestMessage msg = IngestMessage.createMessage(getNextMessageId(), msgLevel, RecentActivityExtracterModuleFactory.getModuleName(), "Finished " + dataSource.getName() + " - " + errorMsgSubject, errorMessage.toString()); services.postMessage(msg); StringBuilder historyMsg = new StringBuilder(); @@ -110,7 +114,7 @@ public final class RAImageIngestModule extends IngestModuleAdapter implements Da historyMsg.append(""); } historyMsg.append(""); - final IngestMessage inboxMsg = IngestMessage.createMessage(++messageId, MessageType.INFO, RecentActivityExtracterModuleFactory.getModuleName(), dataSource.getName() + " - Browser Results", historyMsg.toString()); + final IngestMessage inboxMsg = IngestMessage.createMessage(getNextMessageId(), MessageType.INFO, RecentActivityExtracterModuleFactory.getModuleName(), dataSource.getName() + " - Browser Results", historyMsg.toString()); services.postMessage(inboxMsg); return ResultCode.OK; @@ -135,39 +139,36 @@ public final class RAImageIngestModule extends IngestModuleAdapter implements Da } @Override - public void startUp(org.sleuthkit.autopsy.ingest.IngestModuleContext context) { - extracters = new ArrayList<>(); - browserExtracters = new ArrayList<>(); + public void startUp(org.sleuthkit.autopsy.ingest.IngestModuleContext context) Exception { + super.startUp(context); services = IngestServices.getDefault(); - final Extract registry = new ExtractRegistry(); - final Extract iexplore = new ExtractIE(); - final Extract recentDocuments = new RecentDocumentsByLnk(); - final Extract chrome = new Chrome(); - final Extract firefox = new Firefox(); - final Extract SEUQA = new SearchEngineURLQueryAnalyzer(); + Extract registry = new ExtractRegistry(); + Extract iexplore 
= new ExtractIE(); + Extract recentDocuments = new RecentDocumentsByLnk(); + Extract chrome = new Chrome(); + Extract firefox = new Firefox(); + Extract SEUQA = new SearchEngineURLQueryAnalyzer(); extracters.add(chrome); extracters.add(firefox); extracters.add(iexplore); extracters.add(recentDocuments); - // this needs to run after the web browser modules - extracters.add(SEUQA); - - // this runs last because it is slowest - extracters.add(registry); + extracters.add(SEUQA); // this needs to run after the web browser modules + extracters.add(registry); // this runs last because it is slowest // RJCTODO: Why? browserExtracters.add(chrome); browserExtracters.add(firefox); browserExtracters.add(iexplore); - - for (Extract extracter : extracters) { + + for (Extract extracter : extracters) { try { -// extracter.init(); // RJCTODO + extracter.init(); } catch (Exception ex) { logger.log(Level.SEVERE, "Exception during init() of " + extracter.getName(), ex); + throw new IngestModuleException(ex.getMessage()); } - } + } } private void stop() { diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RecentDocumentsByLnk.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RecentDocumentsByLnk.java index 3bff0e13d6..56e758b84a 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RecentDocumentsByLnk.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RecentDocumentsByLnk.java @@ -47,7 +47,6 @@ import org.sleuthkit.datamodel.*; class RecentDocumentsByLnk extends Extract { private static final Logger logger = Logger.getLogger(RecentDocumentsByLnk.class.getName()); private IngestServices services; - final private static String MODULE_VERSION = "1.0"; /** * Find the documents that Windows stores about recent documents and make artifacts. 
@@ -57,11 +56,11 @@ class RecentDocumentsByLnk extends Extract { private void getRecentDocuments(Content dataSource, DataSourceIngestModuleStatusHelper controller) { org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); - List recentFiles = null; + List recentFiles; try { recentFiles = fileManager.findFiles(dataSource, "%.lnk", "Recent"); } catch (TskCoreException ex) { - logger.log(Level.WARNING, "Error searching for .lnk files."); + logger.log(Level.WARNING, "Error searching for .lnk files.", ex); this.addErrorMessage(this.getName() + ": Error getting lnk Files."); return; } @@ -106,7 +105,7 @@ class RecentDocumentsByLnk extends Extract { } @Override - public void extractRecentActivity(Content dataSource, DataSourceIngestModuleStatusHelper controller) { + public void process(Content dataSource, DataSourceIngestModuleStatusHelper controller) { dataFound = false; this.getRecentDocuments(dataSource, controller); } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java index 8a592f8a59..4f0253ad55 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java @@ -60,7 +60,6 @@ import org.xml.sax.SAXException; */ class SearchEngineURLQueryAnalyzer extends Extract { - private static final Logger logger = Logger.getLogger(SearchEngineURLQueryAnalyzer.class.getName()); private static final String XMLFILE = "SEUQAMappings.xml"; private static final String XSDFILE = "SearchEngineSchema.xsd"; private static String[] searchEngineNames; @@ -123,7 +122,7 @@ class SearchEngineURLQueryAnalyzer extends Extract { String EngineName = nnm.getNamedItem("engine").getNodeValue(); String EnginedomainSubstring = 
nnm.getNamedItem("domainSubstring").getNodeValue(); - Map splits = new HashMap(); + Map splits = new HashMap<>(); NodeList listSplits = xmlinput.getElementsByTagName("splitToken"); for (int k = 0; k < listSplits.getLength(); k++) { @@ -307,23 +306,21 @@ class SearchEngineURLQueryAnalyzer extends Extract { } @Override - public void extractRecentActivity(Content dataSource, DataSourceIngestModuleStatusHelper controller) { + public void process(Content dataSource, DataSourceIngestModuleStatusHelper controller) { this.getURLs(dataSource, controller); logger.info("Search Engine stats: \n" + getTotals()); } - // RJCTODO: Move to ctor or something -// @Override -// public void init(IngestModuleInit initContext) { -// try { -// services = IngestServices.getDefault(); -// PlatformUtil.extractResourceToUserConfigDir(SearchEngineURLQueryAnalyzer.class, XMLFILE); -// init2(); -// } catch (IOException e) { -// logger.log(Level.SEVERE, "Unable to find " + XMLFILE, e); -// } -// } -// + @Override + void init() throws Exception { + try { + PlatformUtil.extractResourceToUserConfigDir(SearchEngineURLQueryAnalyzer.class, XMLFILE); + init2(); + } catch (IOException e) { + logger.log(Level.SEVERE, "Unable to find " + XMLFILE, e); + } + } + private void init2() { try { String path = PlatformUtil.getUserConfigDirectory() + File.separator + XMLFILE; @@ -347,4 +344,14 @@ class SearchEngineURLQueryAnalyzer extends Extract { logger.log(Level.SEVERE, "Unable to parse XML file", sxe); } } + + @Override + public void complete() { + logger.info("Search Engine URL Query Analyzer has completed."); + } + + @Override + public void stop() { + logger.info("Attempted to stop Search Engine URL Query Analyzer, but operation is not supported; skipping..."); + } } diff --git a/ScalpelCarver/nbproject/project.xml b/ScalpelCarver/nbproject/project.xml index 09a3bb2f87..2f4a9a1ac3 100644 --- a/ScalpelCarver/nbproject/project.xml +++ b/ScalpelCarver/nbproject/project.xml @@ -6,6 +6,14 @@ 
org.sleuthkit.autopsy.scalpel + + org.openide.util.lookup + + + + 8.19.1 + + org.sleuthkit.autopsy.core diff --git a/ScalpelCarver/src/org/sleuthkit/autopsy/scalpel/ScalpelCarverIngestModule.java b/ScalpelCarver/src/org/sleuthkit/autopsy/scalpel/ScalpelCarverIngestModule.java index 958731c5cd..ecc46237a8 100644 --- a/ScalpelCarver/src/org/sleuthkit/autopsy/scalpel/ScalpelCarverIngestModule.java +++ b/ScalpelCarver/src/org/sleuthkit/autopsy/scalpel/ScalpelCarverIngestModule.java @@ -18,268 +18,226 @@ */ package org.sleuthkit.autopsy.scalpel; -//import java.io.File; -//import java.io.IOException; -//import java.util.ArrayList; -//import java.util.List; -//import java.util.logging.Level; -//import org.sleuthkit.autopsy.casemodule.Case; -//import org.sleuthkit.autopsy.coreutils.Logger; -//import org.sleuthkit.autopsy.coreutils.PlatformUtil; -//import org.sleuthkit.autopsy.coreutils.Version; -//import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile; -//import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile.ProcessResult; -//import org.sleuthkit.autopsy.ingest.IngestModuleInit; -//import org.sleuthkit.autopsy.ingest.IngestServices; -//import org.sleuthkit.autopsy.ingest.ModuleContentEvent; -//import org.sleuthkit.autopsy.scalpel.jni.ScalpelOutputParser.CarvedFileMeta; -//import org.sleuthkit.autopsy.scalpel.jni.ScalpelCarver; -//import org.sleuthkit.autopsy.scalpel.jni.ScalpelException; -//import org.sleuthkit.datamodel.AbstractFile; -//import org.sleuthkit.datamodel.Content; -//import org.sleuthkit.datamodel.FileSystem; -//import org.sleuthkit.datamodel.Image; -//import org.sleuthkit.datamodel.LayoutFile; -//import org.sleuthkit.datamodel.SleuthkitCase; -//import org.sleuthkit.datamodel.TskCoreException; -//import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM; -//import org.sleuthkit.datamodel.TskFileRange; -//import org.sleuthkit.datamodel.Volume; +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import 
java.util.List; +import java.util.logging.Level; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.coreutils.PlatformUtil; +import org.sleuthkit.autopsy.coreutils.Version; +import org.sleuthkit.autopsy.ingest.FileIngestModule; +import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile; +import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile.ProcessResult; +import org.sleuthkit.autopsy.ingest.IngestModuleAdapter; +import org.sleuthkit.autopsy.ingest.IngestModuleContext; +import org.sleuthkit.autopsy.ingest.IngestModuleInit; +import org.sleuthkit.autopsy.ingest.IngestServices; +import org.sleuthkit.autopsy.ingest.ModuleContentEvent; +import org.sleuthkit.autopsy.scalpel.jni.ScalpelOutputParser.CarvedFileMeta; +import org.sleuthkit.autopsy.scalpel.jni.ScalpelCarver; +import org.sleuthkit.autopsy.scalpel.jni.ScalpelException; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.FileSystem; +import org.sleuthkit.datamodel.Image; +import org.sleuthkit.datamodel.LayoutFile; +import org.sleuthkit.datamodel.SleuthkitCase; +import org.sleuthkit.datamodel.TskCoreException; +import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM; +import org.sleuthkit.datamodel.TskFileRange; +import org.sleuthkit.datamodel.Volume; /** * Scalpel carving ingest module */ -//class ScalpelCarverIngestModule { // extends IngestModuleAbstractFile { // disable autodiscovery for now { -// -// private static final Logger logger = Logger.getLogger(ScalpelCarverIngestModule.class.getName()); -// -// private static ScalpelCarverIngestModule instance; -// private final String MODULE_NAME = "Scalpel Carver"; -// private final String MODULE_DESCRIPTION = "Carves files from unallocated space at ingest time.\nCarved files are reanalyzed and displayed in the directory tree."; -// private final String MODULE_VERSION = Version.getVersion(); -// private final 
String MODULE_OUTPUT_DIR_NAME = "ScalpelCarver"; -// private String moduleOutputDirPath; -// private String configFileName = "scalpel.conf"; -// private String configFilePath; -// private boolean initialized = false; -// private ScalpelCarver carver; -// -// private ScalpelCarverIngestModule() { -// ScalpelCarver.init(); -// } -// -// // @Override -// public ProcessResult process(PipelineContext pipelineContext, AbstractFile abstractFile) { -// -// if (!initialized) { -// return ProcessResult.OK; -// } -// -// // only process files whose type is TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS -// TSK_DB_FILES_TYPE_ENUM type = abstractFile.getType(); -// if (type != TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) { -// return ProcessResult.OK; -// } -// -// // create the output directory for this run -// String scalpelOutputDirPath = moduleOutputDirPath + File.separator + abstractFile.getId(); -// File scalpelOutputDir = new File(scalpelOutputDirPath); -// if (!scalpelOutputDir.exists()) { -// if (!scalpelOutputDir.mkdir()) { -// logger.log(Level.SEVERE, "Could not create Scalpel output directory: " + scalpelOutputDirPath); -// return ProcessResult.OK; -// } -// } -// -// // find the ID of the parent FileSystem, Volume or Image -// long id = -1; -// Content parent = null; -// try { -// parent = abstractFile.getParent(); -// } catch (TskCoreException ex) { -// logger.log(Level.SEVERE, "Exception while trying to get parent of AbstractFile.", ex); -// } -// while (parent != null) { -// if (parent instanceof FileSystem || -// parent instanceof Volume || -// parent instanceof Image) { -// id = parent.getId(); -// break; -// } -// try { -// parent = parent.getParent(); -// } catch (TskCoreException ex) { -// logger.log(Level.SEVERE, "Exception while trying to get parent of Content object.", ex); -// } -// } -// -// // make sure we have a valid systemID -// if (id == -1) { -// logger.log(Level.SEVERE, "Could not get an ID for a FileSystem, Volume or Image for the given AbstractFile."); -// 
return ProcessResult.OK; -// } -// -// // carve the AbstractFile -// List output = null; -// try { -// output = carver.carve(abstractFile, configFilePath, scalpelOutputDirPath); -// } catch (ScalpelException ex) { -// logger.log(Level.SEVERE, "Error when attempting to carved data from AbstractFile with ID " + abstractFile.getId()); -// return ProcessResult.OK; -// } -// -// -// // get the image's size -// long imageSize = Long.MAX_VALUE; -// try { -// -// imageSize = abstractFile.getImage().getSize(); -// } catch (TskCoreException ex) { -// logger.log(Level.SEVERE, "Could not obtain the image's size."); -// } -// -// // add a carved file to the DB for each file that scalpel carved -// SleuthkitCase db = Case.getCurrentCase().getSleuthkitCase(); -// List carvedFiles = new ArrayList(output.size()); -// for (CarvedFileMeta carvedFileMeta : output) { -// -// // calculate the byte offset of this carved file -// long byteOffset; -// try { -// byteOffset = abstractFile.convertToImgOffset(carvedFileMeta.getByteStart()); -// } catch (TskCoreException ex) { -// logger.log(Level.SEVERE, "Could not calculate the image byte offset of AbstractFile (" + abstractFile.getName() + ")"); -// break; -// } -// -// // get the size of the carved file -// long size = carvedFileMeta.getByteLength(); -// -// // create the list of TskFileRange objects -// List data = new ArrayList(); -// data.add(new TskFileRange(byteOffset, size, 0)); -// -// // add the carved file -// try { -// carvedFiles.add(db.addCarvedFile(carvedFileMeta.getFileName(), size, id, data)); -// } catch (TskCoreException ex) { -// logger.log(Level.SEVERE, "There was a problem while trying to add a carved file to the database.", ex); -// } -// } -// -// // get the IngestServices object -// IngestServices is = IngestServices.getDefault(); -// -// // get the parent directory of the carved files -// Content carvedFileDir = null; -// if (!carvedFiles.isEmpty()) { -// try { -// carvedFileDir = carvedFiles.get(0).getParent(); -// 
} catch (TskCoreException ex) { -// logger.log(Level.SEVERE, "There was a problem while trying to obtain the carved files directory.", ex); -// } -// } -// -// // send a notification about the carved files directory -// if (carvedFileDir != null) { -// is.fireModuleContentEvent(new ModuleContentEvent(carvedFileDir)); -// } else { -// logger.log(Level.SEVERE, "Could not obtain the carved files directory."); -// } -// -// // reschedule carved files -// for (LayoutFile carvedFile : carvedFiles) { -// is.scheduleFile(carvedFile, pipelineContext); -// } -// -// return ProcessResult.OK; -// } -// -// -// public static ScalpelCarverIngestModule getDefault() { -// if (instance == null) { -// synchronized (ScalpelCarverIngestModule.class) { -// if (instance == null) { -// instance = new ScalpelCarverIngestModule(); -// } -// } -// } -// return instance; -// } -// -// // @Override -// public void init(IngestModuleInit initContext) { -// -// // make sure this is Windows -// String os = System.getProperty("os.name"); -// if (!os.startsWith("Windows")) { -// logger.log(Level.WARNING, "Scalpel carving module is not compatible with non-Windows OS's at this time."); -// return; -// } -// -// -// carver = new ScalpelCarver(); -// if (! carver.isInitialized()) { -// logger.log(Level.SEVERE, "Error initializing scalpel carver. 
"); -// return; -// } -// -// // make sure module output directory exists; create it if it doesn't -// moduleOutputDirPath = Case.getCurrentCase().getModulesOutputDirAbsPath() + -// File.separator + MODULE_OUTPUT_DIR_NAME; -// File moduleOutputDir = new File(moduleOutputDirPath); -// if (!moduleOutputDir.exists()) { -// if (!moduleOutputDir.mkdir()) { -// logger.log(Level.SEVERE, "Could not create the output directory for the Scalpel module."); -// return; -// } -// } -// -// // create path to scalpel config file in user's home directory -// configFilePath = PlatformUtil.getUserConfigDirectory() -// + File.separator + configFileName; -// -// // copy the default config file to the user's home directory if one -// // is not already there -// try { -// PlatformUtil.extractResourceToUserConfigDir(this.getClass(), configFileName); -// } catch (IOException ex) { -// logger.log(Level.SEVERE, "Could not obtain the path to the Scalpel configuration file.", ex); -// return; -// } -// -// initialized = true; -// } -// -// // @Override -// public void complete() { } -// -// // @Override -// public void stop() { } -// -// // @Override -// public String getName() { -// return MODULE_NAME; -// } -// -// // @Override -// public String getVersion() { -// return MODULE_VERSION; -// } -// -// // @Override -// public String getDescription() { -// return MODULE_DESCRIPTION; -// } -// -// // @Override -// public boolean hasBackgroundJobsRunning() { -// return false; -// } -// -// -// -// -// -// -//} + +class ScalpelCarverIngestModule extends IngestModuleAdapter implements FileIngestModule { + + private static final Logger logger = Logger.getLogger(ScalpelCarverIngestModule.class.getName()); + private final String MODULE_OUTPUT_DIR_NAME = "ScalpelCarver"; + private String moduleOutputDirPath; + private String configFileName = "scalpel.conf"; + private String configFilePath; + private boolean initialized = false; + private ScalpelCarver carver; + private IngestModuleContext context; + + 
ScalpelCarverIngestModule() { + } + + @Override + public ResultCode process(AbstractFile abstractFile) { + ScalpelCarver.init(); + + if (!initialized) { + return ResultCode.OK; + } + + // only process files whose type is TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS + TSK_DB_FILES_TYPE_ENUM type = abstractFile.getType(); + if (type != TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) { + return ResultCode.OK; + } + + // create the output directory for this run + String scalpelOutputDirPath = moduleOutputDirPath + File.separator + abstractFile.getId(); + File scalpelOutputDir = new File(scalpelOutputDirPath); + if (!scalpelOutputDir.exists()) { + if (!scalpelOutputDir.mkdir()) { + logger.log(Level.SEVERE, "Could not create Scalpel output directory: " + scalpelOutputDirPath); + return ResultCode.OK; + } + } + + // find the ID of the parent FileSystem, Volume or Image + long id = -1; + Content parent = null; + try { + parent = abstractFile.getParent(); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, "Exception while trying to get parent of AbstractFile.", ex); + } + while (parent != null) { + if (parent instanceof FileSystem || + parent instanceof Volume || + parent instanceof Image) { + id = parent.getId(); + break; + } + try { + parent = parent.getParent(); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, "Exception while trying to get parent of Content object.", ex); + } + } + + // make sure we have a valid systemID + if (id == -1) { + logger.log(Level.SEVERE, "Could not get an ID for a FileSystem, Volume or Image for the given AbstractFile."); + return ResultCode.OK; + } + + // carve the AbstractFile + List output = null; + try { + output = carver.carve(abstractFile, configFilePath, scalpelOutputDirPath); + } catch (ScalpelException ex) { + logger.log(Level.SEVERE, "Error when attempting to carved data from AbstractFile with ID " + abstractFile.getId()); + return ResultCode.OK; + } + + + // get the image's size + long imageSize = Long.MAX_VALUE; + try { + + 
imageSize = abstractFile.getImage().getSize(); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, "Could not obtain the image's size."); + } + + // add a carved file to the DB for each file that scalpel carved + SleuthkitCase db = Case.getCurrentCase().getSleuthkitCase(); + List carvedFiles = new ArrayList(output.size()); + for (CarvedFileMeta carvedFileMeta : output) { + + // calculate the byte offset of this carved file + long byteOffset; + try { + byteOffset = abstractFile.convertToImgOffset(carvedFileMeta.getByteStart()); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, "Could not calculate the image byte offset of AbstractFile (" + abstractFile.getName() + ")"); + break; + } + + // get the size of the carved file + long size = carvedFileMeta.getByteLength(); + + // create the list of TskFileRange objects + List data = new ArrayList(); + data.add(new TskFileRange(byteOffset, size, 0)); + + // add the carved file + try { + carvedFiles.add(db.addCarvedFile(carvedFileMeta.getFileName(), size, id, data)); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, "There was a problem while trying to add a carved file to the database.", ex); + } + } + + // get the IngestServices object + IngestServices is = IngestServices.getDefault(); + + // get the parent directory of the carved files + Content carvedFileDir = null; + if (!carvedFiles.isEmpty()) { + try { + carvedFileDir = carvedFiles.get(0).getParent(); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, "There was a problem while trying to obtain the carved files directory.", ex); + } + } + + // send a notification about the carved files directory + if (carvedFileDir != null) { + is.fireModuleContentEvent(new ModuleContentEvent(carvedFileDir)); + } else { + logger.log(Level.SEVERE, "Could not obtain the carved files directory."); + } + + // reschedule carved files + + for (LayoutFile carvedFile : carvedFiles) { + is.scheduleFile(carvedFile, pipelineContext); + } + + return 
ResultCode.OK; + } + + @Override + public void startUp(IngestModuleContext context) throws IngestModuleException { + this.context = context; + + // make sure this is Windows + String os = System.getProperty("os.name"); + if (!os.startsWith("Windows")) { + logger.log(Level.WARNING, "Scalpel carving module is not compatible with non-Windows OS's at this time."); + return; + } + + + carver = new ScalpelCarver(); + if (! carver.isInitialized()) { + logger.log(Level.SEVERE, "Error initializing scalpel carver. "); + return; + } + + // make sure module output directory exists; create it if it doesn't + moduleOutputDirPath = Case.getCurrentCase().getModulesOutputDirAbsPath() + + File.separator + MODULE_OUTPUT_DIR_NAME; + File moduleOutputDir = new File(moduleOutputDirPath); + if (!moduleOutputDir.exists()) { + if (!moduleOutputDir.mkdir()) { + logger.log(Level.SEVERE, "Could not create the output directory for the Scalpel module."); + return; + } + } + + // create path to scalpel config file in user's home directory + configFilePath = PlatformUtil.getUserConfigDirectory() + + File.separator + configFileName; + + // copy the default config file to the user's home directory if one + // is not already there + try { + PlatformUtil.extractResourceToUserConfigDir(this.getClass(), configFileName); + } catch (IOException ex) { + logger.log(Level.SEVERE, "Could not obtain the path to the Scalpel configuration file.", ex); + return; + } + + initialized = true; + } +} diff --git a/ScalpelCarver/src/org/sleuthkit/autopsy/scalpel/ScalpelCarverModuleFactory.java b/ScalpelCarver/src/org/sleuthkit/autopsy/scalpel/ScalpelCarverModuleFactory.java new file mode 100755 index 0000000000..e86b83ee26 --- /dev/null +++ b/ScalpelCarver/src/org/sleuthkit/autopsy/scalpel/ScalpelCarverModuleFactory.java @@ -0,0 +1,65 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2014 Basis Technology Corp. 
+ * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.scalpel; + +// TODO: Uncomment the following line to allow the ingest framework to use this module +//import org.openide.util.lookup.ServiceProvider; +import org.sleuthkit.autopsy.coreutils.Version; +import org.sleuthkit.autopsy.ingest.IngestModuleFactoryAdapter; +import org.sleuthkit.autopsy.ingest.FileIngestModule; +import org.sleuthkit.autopsy.ingest.IngestModuleFactory; +import org.sleuthkit.autopsy.ingest.IngestModuleSettings; + +/** + * A factory that creates file ingest modules that use Scalpel to carve + * unallocated space. 
+ */ +// TODO: Uncomment the following line to allow the ingest framework to use this module +//@ServiceProvider(service = IngestModuleFactory.class) +public class ScalpelCarverModuleFactory extends IngestModuleFactoryAdapter { + + @Override + public String getModuleDisplayName() { + return getModuleName(); + } + + static String getModuleName() { + return "Scalpel Carver"; + } + + @Override + public String getModuleDescription() { + return "Carves files from unallocated space at ingest time.\nCarved files are reanalyzed and displayed in the directory tree."; + } + + @Override + public String getModuleVersionNumber() { + return Version.getVersion(); + } + + @Override + public boolean isFileIngestModuleFactory() { + return true; + } + + @Override + public FileIngestModule createFileIngestModule(IngestModuleSettings ingestOptions) { + return new ScalpelCarverIngestModule(); + } +} diff --git a/SevenZip/src/org/sleuthkit/autopsy/sevenzip/SevenZipIngestModule.java b/SevenZip/src/org/sleuthkit/autopsy/sevenzip/SevenZipIngestModule.java index a03bd9f47b..c5b72e609f 100644 --- a/SevenZip/src/org/sleuthkit/autopsy/sevenzip/SevenZipIngestModule.java +++ b/SevenZip/src/org/sleuthkit/autopsy/sevenzip/SevenZipIngestModule.java @@ -58,8 +58,6 @@ import org.sleuthkit.datamodel.TskData; import org.sleuthkit.autopsy.ingest.IngestModule.ResultCode; import org.sleuthkit.autopsy.ingest.IngestModuleContext; -// RJCTODO: Possibly use getContext().getModuleDisplayName() more - /** * 7Zip ingest module Extracts supported archives, adds extracted DerivedFiles, * reschedules extracted DerivedFiles for ingest. 
@@ -92,8 +90,8 @@ public final class SevenZipIngestModule extends IngestModuleAdapter implements F } @Override - public void startUp(IngestModuleContext context) { - setContext(context); + public void startUp(IngestModuleContext context) throws IngestModuleException{ + super.startUp(context); unpackDir = getContext().getOutputDirectoryRelativePath(); unpackDirPath = getContext().getOutputDirectoryAbsolutePath(); fileManager = getContext().getCase().getServices().getFileManager(); diff --git a/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java b/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java index 1b4e15cbb0..7f1d11a8c6 100755 --- a/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java +++ b/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java @@ -35,21 +35,21 @@ import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; import org.sleuthkit.autopsy.ingest.DataSourceIngestModule; import org.sleuthkit.autopsy.ingest.IngestModuleContext; -import org.sleuthkit.autopsy.ingest.IngestModuleTempApiShim; /** * Data source ingest module that verifies the integrity of an Expert Witness * Format (EWF) E01 image file by generating a hash of the file and comparing it * to the value stored in the image. 
*/ -public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSourceIngestModule, IngestModuleTempApiShim { +public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSourceIngestModule { private static final Logger logger = Logger.getLogger(EwfVerifyIngestModule.class.getName()); private static final long DEFAULT_CHUNK_SIZE = 32 * 1024; private static final IngestServices services = IngestServices.getDefault(); + private IngestModuleContext context; private Image img; private String imgName; private MessageDigest messageDigest; - private static int messageId = 0; + private static int messageId = 0; // RJCTODO: Copy-paste synchronized implementation, put in sample also private boolean verified = false; private boolean skipped = false; private String calculatedHash = ""; @@ -59,13 +59,8 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSo } @Override - public String getDisplayName() { - return EwfVerifierModuleFactory.getModuleName(); - } - - @Override - public void startUp(IngestModuleContext context) { - setContext(context); + public void startUp(IngestModuleContext context) throws Exception { + this.context = context; verified = false; skipped = false; img = null; @@ -93,8 +88,8 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSo } catch (TskCoreException ex) { img = null; String message = "Failed to get image from Content"; - getContext().logError(EwfVerifyIngestModule.class, message, ex); - getContext().postIngestMessage(++messageId, MessageType.ERROR, message); + context.logError(EwfVerifyIngestModule.class, message, ex); + context.postIngestMessage(++messageId, MessageType.ERROR, message); return ResultCode.ERROR; } @@ -102,7 +97,7 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSo if (img.getType() != TskData.TSK_IMG_TYPE_ENUM.TSK_IMG_TYPE_EWF_EWF) { img = null; logger.log(Level.INFO, "Skipping non-ewf image {0}", imgName); 
- services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, getDisplayName(), + services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, EwfVerifierModuleFactory.getModuleName(), "Skipping non-ewf image " + imgName)); skipped = true; return ResultCode.OK; @@ -114,19 +109,19 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSo logger.log(Level.INFO, "Hash value stored in {0}: {1}", new Object[]{imgName, storedHash}); } else { - services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, getDisplayName(), + services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, EwfVerifierModuleFactory.getModuleName(), "Image " + imgName + " does not have stored hash.")); return ResultCode.ERROR; } logger.log(Level.INFO, "Starting ewf verification of {0}", img.getName()); - services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, getDisplayName(), + services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, EwfVerifierModuleFactory.getModuleName(), "Starting " + imgName)); long size = img.getSize(); if (size == 0) { logger.log(Level.WARNING, "Size of image {0} was 0 when queried.", imgName); - services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, getDisplayName(), + services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, EwfVerifierModuleFactory.getModuleName(), "Error getting size of " + imgName + ". 
Image will not be processed.")); } @@ -152,7 +147,7 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSo read = img.read(data, i * chunkSize, chunkSize); } catch (TskCoreException ex) { String msg = "Error reading " + imgName + " at chunk " + i; - services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, getDisplayName(), msg)); + services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, EwfVerifierModuleFactory.getModuleName(), msg)); logger.log(Level.SEVERE, msg, ex); return ResultCode.ERROR; } @@ -169,14 +164,14 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSo @Override public void shutDown(boolean ingestJobCancelled) { - logger.log(Level.INFO, "complete() {0}", getDisplayName()); + logger.log(Level.INFO, "complete() {0}", EwfVerifierModuleFactory.getModuleName()); if (skipped == false) { String msg = verified ? " verified" : " not verified"; String extra = "

EWF Verification Results for " + imgName + "

"; extra += "
  • Result:" + msg + "
  • "; extra += "
  • Calculated hash: " + calculatedHash + "
  • "; extra += "
  • Stored hash: " + storedHash + "
  • "; - services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, getDisplayName(), imgName + msg, extra)); + services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, EwfVerifierModuleFactory.getModuleName(), imgName + msg, extra)); logger.log(Level.INFO, "{0}{1}", new Object[]{imgName, msg}); } } diff --git a/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/EmailParserModuleFactory.java b/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/EmailParserModuleFactory.java index a78ba4199a..a9fb4c7527 100755 --- a/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/EmailParserModuleFactory.java +++ b/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/EmailParserModuleFactory.java @@ -42,7 +42,7 @@ public class EmailParserModuleFactory extends IngestModuleFactoryAdapter { @Override public String getModuleDisplayName() { - return "Archive Extractor"; + return getModuleName(); } @Override