From 2b95138f70f4ecf8cd6bcc969cf5b10d1266bed2 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Mon, 3 Mar 2014 22:45:48 -0500 Subject: [PATCH] Work towards converting core ingest modules to new ingest API --- .../ingest/DataSourceIngestModule.java | 3 +- .../ingest/IngestDataSourceThread.java | 295 +++-- .../IngestDataSourceWorkerController.java | 114 +- .../autopsy/ingest/IngestManager.java | 15 +- .../autopsy/ingest/IngestModuleInit.java | 49 - .../autopsy/ingest/IngestModuleLoader.java | 1065 +---------------- ExifParser/nbproject/project.xml | 8 + .../ExifParserFileIngestModule.java | 406 +++---- .../exifparser/ExifParserModuleFactory.java | 62 + .../hashdatabase/HashDbIngestModule.java | 136 +-- .../KeywordSearchIngestModule.java | 349 +++--- .../KeywordSearchModuleFactory.java | 121 ++ ewfVerify/nbproject/project.xml | 8 + .../ewfverify/EwfVerifierModuleFactory.java | 63 + .../ewfverify/EwfVerifyIngestModule.java | 378 +++--- 15 files changed, 1066 insertions(+), 2006 deletions(-) delete mode 100644 Core/src/org/sleuthkit/autopsy/ingest/IngestModuleInit.java create mode 100755 ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserModuleFactory.java create mode 100755 KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchModuleFactory.java create mode 100755 ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifierModuleFactory.java diff --git a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java index 2d6f7d730b..38e5a959a1 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java @@ -32,6 +32,5 @@ public interface DataSourceIngestModule extends IngestModule { * @param statusHelper A status helper to be used to report progress and * detect task cancellation. 
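 *
 * A minimal implementation sketch (the module class and the analysis step
 * are hypothetical, and methods inherited from IngestModule are omitted;
 * only the process() signature below is defined by this interface):
 * <pre>{@code
 * public class SampleDataSourceIngestModule implements DataSourceIngestModule {
 *     @Override
 *     public void process(Content dataSource, IngestDataSourceWorkerController statusHelper) {
 *         statusHelper.switchToDeterminate(1);
 *         if (!statusHelper.isCancelled()) {
 *             // analyze the data source here
 *             statusHelper.progress(1);
 *         }
 *     }
 * }
 * }</pre>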
 */
-//    void process(Content dataSource, IngestDataSourceWorkerController statusHelper); // RJCTODO: Change name of IngestDataSourceWorkerController class
-    void process(Content dataSource);
+    void process(Content dataSource, IngestDataSourceWorkerController statusHelper); // RJCTODO: Change name of IngestDataSourceWorkerController class
}
\ No newline at end of file
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestDataSourceThread.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestDataSourceThread.java
index 48abeca7b5..d3ee0e1d84 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestDataSourceThread.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestDataSourceThread.java
@@ -18,157 +18,154 @@
 */
package org.sleuthkit.autopsy.ingest;

-//import java.awt.EventQueue;
-//import java.util.concurrent.locks.Lock;
-//import java.util.concurrent.locks.ReentrantReadWriteLock;
-//import java.util.logging.Level;
-//import org.sleuthkit.autopsy.coreutils.Logger;
-//import javax.swing.SwingWorker;
-//import org.netbeans.api.progress.ProgressHandle;
-//import org.netbeans.api.progress.ProgressHandleFactory;
-//import org.openide.util.Cancellable;
-//import org.sleuthkit.autopsy.coreutils.PlatformUtil;
-//import org.sleuthkit.autopsy.coreutils.StopWatch;
-//import org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent;
-//import org.sleuthkit.datamodel.Content;
+import java.awt.EventQueue;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.logging.Level;
+import org.sleuthkit.autopsy.coreutils.Logger;
+import javax.swing.SwingWorker;
+import org.netbeans.api.progress.ProgressHandle;
+import org.netbeans.api.progress.ProgressHandleFactory;
+import org.openide.util.Cancellable;
+import org.sleuthkit.autopsy.coreutils.PlatformUtil;
+import org.sleuthkit.autopsy.coreutils.StopWatch;
+import org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent;
+import org.sleuthkit.datamodel.Content;

/**
 * Worker thread that runs a data source-level ingest module (image, file set, virtual directory, etc.).
 * Used to process a single data source with a single module.
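 *
 * Usage sketch (illustrative only; in this patch the worker is created and
 * driven by IngestManager, and execute() is inherited from SwingWorker):
 * <pre>{@code
 * IngestDataSourceThread worker = new IngestDataSourceThread(manager, dataSource, module);
 * worker.init();     // initializes the module; rethrows if initialization fails
 * worker.execute();  // runs doInBackground() off the event dispatch thread
 * }</pre>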
*/ -// class IngestDataSourceThread extends SwingWorker { -// -// private final Logger logger = Logger.getLogger(IngestDataSourceThread.class.getName()); -// private ProgressHandle progress; -// private final Content dataSource; -// private final DataSourceIngestModule module; -// private IngestDataSourceWorkerController controller; -// private final IngestManager manager; -// private final IngestModuleInit init; -// private boolean inited; -// //current method of enqueuing data source ingest modules with locks and internal lock queue -// //ensures that we init, run and complete a single data source ingest module at a time -// //uses fairness policy to run them in order enqueued -// private static final Lock dataSourceIngestModuleLock = new ReentrantReadWriteLock(true).writeLock(); -// -// IngestDataSourceThread(IngestManager manager, Content dataSource, DataSourceIngestModule module, IngestModuleInit init) { -// this.manager = manager; -// this.dataSource = dataSource; -// this.module = module; -// this.init = init; -// this.inited = false; -// } -// -// Content getContent() { -// return dataSource; -// } -// -// DataSourceIngestModule getModule() { -// return module; -// } -// -// public void init() { -// -// logger.log(Level.INFO, "Initializing module: " + module.getName()); -// try { -// module.init(dataSource.getId()); -// inited = true; -// } catch (Exception e) { -// logger.log(Level.INFO, "Failed initializing module: " + module.getName() + ", will not run."); -// //will not run -// inited = false; -// throw e; -// } -// } -// -// @Override -// protected Void doInBackground() throws Exception { -// -// logger.log(Level.INFO, "Pending module: " + module.getName()); -// -// final String displayName = module.getName() + " dataSource id:" + dataSource.getId(); -// progress = ProgressHandleFactory.createHandle(displayName + " (Pending...)", new Cancellable() { -// @Override -// public boolean cancel() { -// logger.log(Level.INFO, "DataSource ingest module " + module.getName() + " cancelled by user."); -// if (progress != null) { -// progress.setDisplayName(displayName + " (Cancelling...)"); -// } -// return IngestDataSourceThread.this.cancel(true); -// } -// }); -// progress.start(); -// progress.switchToIndeterminate(); -// -// dataSourceIngestModuleLock.lock(); -// try { -// if (this.isCancelled()) { -// logger.log(Level.INFO, "Cancelled while pending, module: " + module.getName()); -// return Void.TYPE.newInstance(); -// } -// logger.log(Level.INFO, "Starting module: " + module.getName()); -// logger.log(Level.INFO, PlatformUtil.getAllMemUsageInfo()); -// progress.setDisplayName(displayName); -// -// if (inited == false) { -// logger.log(Level.INFO, "Module wasn't initialized, will not run: " + module.getName()); -// return Void.TYPE.newInstance(); -// } -// logger.log(Level.INFO, "Starting processing of module: " + module.getName()); -// -// controller = new IngestDataSourceWorkerController(this, progress); -// -// if (isCancelled()) { -// logger.log(Level.INFO, "Terminating DataSource ingest module " + module.getName() + " due to cancellation."); -// return Void.TYPE.newInstance(); -// } -// final StopWatch timer = new StopWatch(); -// timer.start(); -// try { -// // RJCTODO -//// module.process(pipelineContext, dataSource, controller); -// } catch (Exception e) { -// logger.log(Level.WARNING, "Exception in module: " + module.getName() + " DataSource: " + dataSource.getName(), e); -// } finally { -// timer.stop(); -// logger.log(Level.INFO, "Done processing of module: " + 
module.getName()
-// + " took " + timer.getElapsedTimeSecs() + " secs. to process()");
-//
-//
-// //cleanup queues (worker and DataSource/module)
-// manager.removeDataSourceIngestWorker(this);
-//
-// if (!this.isCancelled()) {
-// logger.log(Level.INFO, "Module " + module.getName() + " completed");
-// try {
-// module.complete();
-// } catch (Exception e) {
-// logger.log(Level.INFO, "Error completing the module " + module.getName(), e);
-// }
-// IngestManager.fireModuleEvent(IngestModuleEvent.COMPLETED.toString(), module.getName());
-// } else {
-// logger.log(Level.INFO, "Module " + module.getName() + " stopped");
-// try {
-// module.stop();
-// } catch (Exception e) {
-// logger.log(Level.INFO, "Error stopping the module" + module.getName(), e);
-// }
-// IngestManager.fireModuleEvent(IngestModuleEvent.STOPPED.toString(), module.getName());
-// }
-//
-// }
-// return Void.TYPE.newInstance();
-// } finally {
-// //release the lock so next module can run
-// dataSourceIngestModuleLock.unlock();
-// EventQueue.invokeLater(new Runnable() {
-// @Override
-// public void run() {
-// progress.finish();
-// }
-// });
-// logger.log(Level.INFO, "Done running module: " + module.getName());
-// logger.log(Level.INFO, PlatformUtil.getAllMemUsageInfo());
-// }
-// }
-//}
+ class IngestDataSourceThread extends SwingWorker<Void, Void> {
+
+ private final Logger logger = Logger.getLogger(IngestDataSourceThread.class.getName());
+ private ProgressHandle progress;
+ private final Content dataSource;
+ private final DataSourceIngestModule module;
+ private IngestDataSourceWorkerController controller;
+ private final IngestManager manager;
+ private boolean inited;
+ //current method of enqueuing data source ingest modules with locks and internal lock queue
+ //ensures that we init, run and complete a single data source ingest module at a time
+ //uses fairness policy to run them in order enqueued
+ private static final Lock dataSourceIngestModuleLock = new ReentrantReadWriteLock(true).writeLock();
+
+ IngestDataSourceThread(IngestManager manager, Content dataSource, DataSourceIngestModule module) {
+ this.manager = manager;
+ this.dataSource = dataSource;
+ this.module = module;
+ this.inited = false;
+ }
+
+ Content getContent() {
+ return dataSource;
+ }
+
+ DataSourceIngestModule getModule() {
+ return module;
+ }
+
+ public void init() {
+
+ logger.log(Level.INFO, "Initializing module: {0}", module.getDisplayName());
+ try {
+ module.init(dataSource.getId());
+ inited = true;
+ } catch (Exception e) {
+ logger.log(Level.INFO, "Failed initializing module: {0}, will not run.", module.getDisplayName());
+ //will not run
+ inited = false;
+ throw e;
+ }
+ }
+
+ @Override
+ protected Void doInBackground() throws Exception {
+
+ logger.log(Level.INFO, "Pending module: {0}", module.getDisplayName());
+
+ final String displayName = module.getDisplayName() + " dataSource id:" + dataSource.getId();
+ progress = ProgressHandleFactory.createHandle(displayName + " (Pending...)", new Cancellable() {
+ @Override
+ public boolean cancel() {
+ logger.log(Level.INFO, "DataSource ingest module {0} cancelled by user.", module.getDisplayName());
+ if (progress != null) {
+ progress.setDisplayName(displayName + " (Cancelling...)");
+ }
+ return IngestDataSourceThread.this.cancel(true);
+ }
+ });
+ progress.start();
+ progress.switchToIndeterminate();
+
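+ // Acquire the shared fair write lock declared above: workers queue here in
+ // arrival order, so only one data source ingest module is initialized, run,
+ // and completed at a time.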
+ dataSourceIngestModuleLock.lock();
+ try {
+ if (this.isCancelled()) {
+ logger.log(Level.INFO, "Cancelled while pending, module: {0}", module.getDisplayName());
+ return Void.TYPE.newInstance();
+ }
+ logger.log(Level.INFO, "Starting module: {0}", module.getDisplayName());
+ logger.log(Level.INFO, PlatformUtil.getAllMemUsageInfo());
+ progress.setDisplayName(displayName);
+
+ if (inited == false) {
+ logger.log(Level.INFO, "Module wasn''t initialized, will not run: {0}", module.getDisplayName());
+ return Void.TYPE.newInstance();
+ }
+ logger.log(Level.INFO, "Starting processing of module: {0}", module.getDisplayName());
+
+ controller = new IngestDataSourceWorkerController(this, progress);
+
+ if (isCancelled()) {
+ logger.log(Level.INFO, "Terminating DataSource ingest module {0} due to cancellation.", module.getDisplayName());
+ return Void.TYPE.newInstance();
+ }
+ final StopWatch timer = new StopWatch();
+ timer.start();
+ try {
+ // RJCTODO
+// module.process(pipelineContext, dataSource, controller);
+ } catch (Exception e) {
+ logger.log(Level.WARNING, "Exception in module: " + module.getDisplayName() + " DataSource: " + dataSource.getName(), e);
+ } finally {
+ timer.stop();
+ logger.log(Level.INFO, "Done processing of module: {0} took {1} secs. to process()", new Object[]{module.getDisplayName(), timer.getElapsedTimeSecs()});
+
+
+ //cleanup queues (worker and DataSource/module)
+ manager.removeDataSourceIngestWorker(this);
+
+ if (!this.isCancelled()) {
+ logger.log(Level.INFO, "Module {0} completed", module.getDisplayName());
+ try {
+ module.complete();
+ } catch (Exception e) {
+ logger.log(Level.INFO, "Error completing the module " + module.getDisplayName(), e);
+ }
+ IngestManager.fireModuleEvent(IngestModuleEvent.COMPLETED.toString(), module.getDisplayName());
+ } else {
+ logger.log(Level.INFO, "Module {0} stopped", module.getDisplayName());
+ try {
+ module.stop();
+ } catch (Exception e) {
+ logger.log(Level.INFO, "Error stopping the module " + module.getDisplayName(), e);
+ }
+ IngestManager.fireModuleEvent(IngestModuleEvent.STOPPED.toString(), module.getDisplayName());
+ }
+
+ }
+ return Void.TYPE.newInstance();
+ } finally {
+ //release the lock so next module can run
+ dataSourceIngestModuleLock.unlock();
+ EventQueue.invokeLater(new Runnable() {
+ @Override
+ public void run() {
+ progress.finish();
+ }
+ });
+ logger.log(Level.INFO, "Done running module: {0}", module.getDisplayName());
+ logger.log(Level.INFO, PlatformUtil.getAllMemUsageInfo());
+ }
+ }
+}
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestDataSourceWorkerController.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestDataSourceWorkerController.java
index b7fb96c977..f12ec02370 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestDataSourceWorkerController.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestDataSourceWorkerController.java
@@ -1,7 +1,7 @@
 /*
 * Autopsy Forensic Browser
 *
- * Copyright 2011 Basis Technology Corp.
+ * Copyright 2011-2014 Basis Technology Corp.
 * Contact: carrier sleuthkit org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,64 +18,64 @@
 */
package org.sleuthkit.autopsy.ingest;

-//import org.netbeans.api.progress.ProgressHandle;
+import org.netbeans.api.progress.ProgressHandle;

-// RJCTODO: Rework or replace this code
+// RJCTODO: This could use a renaming, really don't want it long-term, but maybe need to keep it for 3.1 DISCUSS

/**
 * Controller for DataSource level ingest modules.
 * Used by modules to check task status and to post progress updates.
 */
-//public class IngestDataSourceWorkerController {
-//
-// private IngestDataSourceThread worker;
-// private ProgressHandle progress;
-//
-// /**
-// * Instantiate the controller for the worker
-// * @param worker underlying DataSource ingest thread
-// * @param progress the progress handle
-// */
-// IngestDataSourceWorkerController(IngestDataSourceThread worker, ProgressHandle progress) {
-// this.worker = worker;
-// this.progress = progress;
-// }
-//
-// /**
-// * Check if the task has been cancelled. This should be polled by the module periodically
-// * And the module needs to act, i.e. break out of its processing loop and call its stop() to cleanup
-// *
-// * @return true if the task has been cancelled, false otherwise
-// */
-// public boolean isCancelled() {
-// return worker.isCancelled();
-// }
-//
-// /**
-// * Update the progress bar and switch to determinate mode once number of total work units is known
-// * @param workUnits total number of work units for the DataSource ingest task
-// */
-// public void switchToDeterminate(int workUnits) {
-// if (progress != null) {
-// progress.switchToDeterminate(workUnits);
-// }
-// }
-//
-// /**
-// * Update the progress bar and switch to non determinate mode if number of work units is not known
-// */
-// public void switchToInDeterminate() {
-// if (progress != null) {
-// progress.switchToIndeterminate();
-// }
-// }
-//
-// /**
-// * Update the progress bar with the number of work units performed, if in the determinate mode
-// * @param workUnits number of work units performed so far by the module
-// */
-// public void progress(int workUnits) {
-// if (progress != null) {
-// progress.progress(worker.getContent().getName(), workUnits);
-// }
-// }
-//}
\ No newline at end of file
+public class IngestDataSourceWorkerController {
+
+ private IngestDataSourceThread worker;
+ private ProgressHandle progress;
+
+ /**
+ * Instantiate the controller for the worker
+ * @param worker underlying DataSource ingest thread
+ * @param progress the progress handle
+ */
+ IngestDataSourceWorkerController(IngestDataSourceThread worker, ProgressHandle progress) {
+ this.worker = worker;
+ this.progress = progress;
+ }
+
+ /**
+ * Check whether the task has been canceled. Modules should poll this
+ * periodically and, when it returns true, act on it: break out of the
+ * processing loop and call stop() to clean up.
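+ *
+ * A polling sketch (illustrative only; the work-unit count and the
+ * processUnit() helper are hypothetical):
+ * <pre>{@code
+ * public void process(Content dataSource, IngestDataSourceWorkerController statusHelper) {
+ *     int totalUnits = 100; // hypothetical count of work units in dataSource
+ *     statusHelper.switchToDeterminate(totalUnits);
+ *     for (int i = 0; i < totalUnits; ++i) {
+ *         if (statusHelper.isCancelled()) {
+ *             return; // clean up first, e.g. by calling the module's stop()
+ *         }
+ *         processUnit(dataSource, i); // hypothetical analysis step
+ *         statusHelper.progress(i + 1);
+ *     }
+ * }
+ * }</pre>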
+ *
+ * @return true if the task has been canceled, false otherwise
+ */
+ public boolean isCancelled() {
+ return worker.isCancelled();
+ }
+
+ /**
+ * Update the progress bar and switch to determinate mode once the total number of work units is known
+ * @param workUnits total number of work units for the DataSource ingest task
+ */
+ public void switchToDeterminate(int workUnits) {
+ if (progress != null) {
+ progress.switchToDeterminate(workUnits);
+ }
+ }
+
+ /**
+ * Update the progress bar and switch to indeterminate mode if the number of work units is not known
+ */
+ public void switchToInDeterminate() {
+ if (progress != null) {
+ progress.switchToIndeterminate();
+ }
+ }
+
+ /**
+ * Update the progress bar with the number of work units performed, if in determinate mode
+ * @param workUnits number of work units performed so far by the module
+ */
+ public void progress(int workUnits) {
+ if (progress != null) {
+ progress.progress(worker.getContent().getName(), workUnits);
+ }
+ }
+}
\ No newline at end of file
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java
index 3fe167deed..6c7c226568 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java
@@ -58,7 +58,7 @@ public class IngestManager {
 // private IngestManagerStats stats; // RJCTODO: Decide whether to reimplement
 private final IngestScheduler scheduler;
 private IngestAbstractFileProcessor abstractFileIngester;
-// private List dataSourceIngesters; // RJCTODO: Adapt to new paradigm
+ private List<IngestDataSourceThread> dataSourceIngesters;
 private SwingWorker queueWorker;
 // private final Map abstractFileModulesRetValues = new HashMap<>(); RJCTODO: May be obsolete
 private final static PropertyChangeSupport pcs = new PropertyChangeSupport(IngestManager.class);
@@ -672,14 +672,13 @@ public class IngestManager {
 }
 }

- // RJCTODO: Data source ingest is temporarily disabled
 //data source worker to remove itself when complete or interrupted
-// void removeDataSourceIngestWorker(IngestDataSourceThread worker) {
-// //remove worker
-// synchronized (this) {
-// dataSourceIngesters.remove(worker);
-// }
-// }
+ void removeDataSourceIngestWorker(IngestDataSourceThread worker) {
+ //remove worker
+ synchronized (this) {
+ dataSourceIngesters.remove(worker);
+ }
+ }
 // RJCTODO: Decide whether or not to reimplement this class
 /**
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleInit.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleInit.java
deleted file mode 100644
index 013a123065..0000000000
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleInit.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Autopsy Forensic Browser
- *
- * Copyright 2012 Basis Technology Corp.
- * Contact: carrier sleuthkit org
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */ -package org.sleuthkit.autopsy.ingest; - -/** - * - * Context passed to a module at initialization time. - * It may contain module configuration required to initialize some modules. - */ -public class IngestModuleInit { - -// private String moduleArgs; - - /** - * Get module arguments - * @return module args string, used by some modules - */ -// public String getModuleArgs() { -// return moduleArgs; -// } - - /** - * Sets module args. string (only used by module pipeline) - * @param moduleArgs arguments to set for the module - */ -// void setModuleArgs(String moduleArgs) { -// this.moduleArgs = moduleArgs; -// } -// - - - -} diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleLoader.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleLoader.java index d8e33eed9b..82a4c1e7f9 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleLoader.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleLoader.java @@ -19,1075 +19,44 @@ package org.sleuthkit.autopsy.ingest; -import java.beans.PropertyChangeListener; -import java.beans.PropertyChangeSupport; -import java.io.File; -import java.io.FilenameFilter; -import java.io.IOException; -import java.io.UnsupportedEncodingException; -import java.lang.reflect.Constructor; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; -import java.lang.reflect.Modifier; -import java.lang.reflect.Type; -import java.net.MalformedURLException; -import java.net.URL; -import java.net.URLDecoder; -import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; import java.util.List; -import java.util.Map; -import java.util.Set; import java.util.logging.Level; -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.ParserConfigurationException; -import org.openide.modules.ModuleInfo; -import org.openide.util.Exceptions; import org.openide.util.Lookup; -import org.openide.util.LookupEvent; -import org.openide.util.LookupListener; -import org.reflections.Reflections; -import org.reflections.scanners.ResourcesScanner; -import org.reflections.scanners.SubTypesScanner; -import org.reflections.util.ConfigurationBuilder; -import org.reflections.util.FilterBuilder; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.coreutils.ModuleSettings; -import org.sleuthkit.autopsy.coreutils.PlatformUtil; -import org.sleuthkit.autopsy.coreutils.XMLUtil; -import org.w3c.dom.Comment; -import org.w3c.dom.Document; -import org.w3c.dom.Element; -import org.w3c.dom.NodeList; -import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; -// RJCTODO: Rewrite comment, complete reworking of class -/** - * Class responsible for discovery and loading ingest modules specified in - * pipeline XML file. Maintains a singleton instance. Requires restart of - * application for changes in XML to take effect. - * - * Supports module auto-discovery from system-wide and user-dir wide jar files. - * Discovered modules are validated, and if valid, they are added to end of - * configuration and saved in the XML. - * - * If module is removed/uninstalled, it will remain in the XML file, but it will - * not load because it will fail the validation. - * - * Get a handle to the object by calling static getDefault() method. 
The - * singleton instance will initialize itself the first time - it will load XML - * and autodiscover currently present ingest modules in the jar classpath.. - * - * - * Refer to - * http://sleuthkit.org/sleuthkit/docs/framework-docs/pipeline_config_page.html - * for the pipeline XML fiel schema details. - * - * NOTE: this will be part of future IngestPipelineManager with IngestManager - * code refactored - */ - final class IngestModuleLoader { - private static IngestModuleLoader instance; - private static final Logger logger = Logger.getLogger(IngestModuleLoader.class.getName()); - private ArrayList moduleFactories = new ArrayList<>(); - private PropertyChangeSupport pcs; -// private static final String PIPELINE_CONFIG_XML = "pipeline_config.xml"; -// private static final String XSDFILE = "PipelineConfigSchema.xsd"; -// private String absFilePath; - //raw XML pipeline representation for validation -// private final List pipelinesXML; - //validated pipelines with instantiated modules -// private final List filePipeline; -// private final List dataSourcePipeline; -// private ClassLoader classLoader; -// private static final String ENCODING = "UTF-8"; -// private static final String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss"; -// private SimpleDateFormat dateFormatter; - //used to specify default unique module order of autodiscovered modules - //if not specified -// private int numModDiscovered = 0; -// private static String CUR_MODULES_DISCOVERED_SETTING = "curModulesDiscovered"; - - //events supported - enum Event { - ModulesReloaded - }; +// RJCTODO: Comment +final class IngestModuleLoader { + private static IngestModuleLoader instance; + private ArrayList moduleFactories = new ArrayList<>(); private IngestModuleLoader() { -// pipelinesXML = new ArrayList<>(); -// filePipeline = new ArrayList(); -// dataSourcePipeline = new ArrayList(); -// dateFormatter = new SimpleDateFormat(DATE_FORMAT); -// -// String numModDiscoveredStr = ModuleSettings.getConfigSetting(IngestManager.MODULE_PROPERTIES, CUR_MODULES_DISCOVERED_SETTING); -// if (numModDiscoveredStr != null) { -// try { -// numModDiscovered = Integer.valueOf(numModDiscoveredStr); -// } catch (NumberFormatException e) { -// numModDiscovered = 0; -// logger.log(Level.WARNING, "Could not parse numModDiscovered setting, defaulting to 0", e); -// } -// } - - pcs = new PropertyChangeSupport(this); -// registerModulesChange(); } - synchronized static IngestModuleLoader getDefault() /*throws IngestModuleLoaderException*/ { + synchronized static IngestModuleLoader getDefault() { if (instance == null) { - logger.log(Level.INFO, "Creating ingest module loader instance"); + Logger.getLogger(IngestModuleLoader.class.getName()).log(Level.INFO, "Creating ingest module loader instance"); instance = new IngestModuleLoader(); instance.init(); } return instance; } - /** - * Add a listener to listen for modules reloaded events such as when new - * modules have been added / removed / reconfigured - * - * @param l listener to add - */ - void addModulesReloadedListener(PropertyChangeListener l) { - pcs.addPropertyChangeListener(l); - } - - /** - * Remove a listener to listen for modules reloaded events such as when new - * modules have been added / removed / reconfigured - * - * @param l listener to remove - */ - void removeModulesReloadedListener(PropertyChangeListener l) { - pcs.removePropertyChangeListener(l); - } - - /** - * validate raw pipeline, set valid to true member on pipeline and modules - * if valid log if invalid - * - * valid pipeline: valid 
pipeline type, modules have unique ordering - * - * valid module: module class exists, module can be loaded, module - * implements correct interface, module has proper methods and modifiers to - * create an instance - * - * @throws IngestModuleLoaderException - */ -// private void validate() throws IngestModuleLoaderException { -// for (IngestModuleLoader.IngestPipelineXMLDescriptor pRaw : pipelinesXML) { -// boolean pipelineErrors = false; -// -// //check pipelineType -// String pipelineType = pRaw.type; -// -// IngestModuleLoader.IngestPipelineXMLDescriptor.PIPELINE_TYPE pType = null; -// -// try { -// pType = IngestModuleLoader.IngestPipelineXMLDescriptor.getPipelineType(pipelineType); -// } catch (IllegalArgumentException e) { -// pipelineErrors = true; -// logger.log(Level.SEVERE, "Unknown pipeline type: " + pipelineType); -// -// } -// //ordering store -// Map orderings = new HashMap(); -// -// for (IngestModuleLoader.IngestModuleXMLDescriptor pMod : pRaw.modules) { -// boolean moduleErrors = false; -// -// //record ordering for validation -// int order = pMod.order; -// if (orderings.containsKey(order)) { -// orderings.put(order, orderings.get(order) + 1); -// } else { -// orderings.put(order, 1); -// } -// -// //check pipelineType -// String modType = pMod.type; -// if (!modType.equals(IngestModuleLoader.IngestModuleXMLDescriptor.MODULE_TYPE.PLUGIN.toString())) { -// moduleErrors = true; -// logger.log(Level.SEVERE, "Unknown module type: " + modType); -// } -// -// //classes exist and interfaces implemented -// String location = pMod.location; -// try { -// //netbeans uses custom class loader, otherwise can't load classes from other modules -// -// final Class moduleClass = Class.forName(location, false, classLoader); -// final Type intf = moduleClass.getGenericSuperclass(); -// -// if (pType != null) { -// Class moduleMeta = ((IngestModuleMapping) pType).getIngestModuleInterface(); -// String moduleIntNameCan = moduleMeta.getCanonicalName(); -// String[] moduleIntNameTok = moduleIntNameCan.split(" "); -// String moduleIntName = moduleIntNameTok[moduleIntNameTok.length - 1]; -// -// String intNameCan = intf.toString(); -// String[] intNameCanTok = intNameCan.split(" "); -// String intName = intNameCanTok[intNameCanTok.length - 1]; -// if (!intName.equals(moduleIntName)) { -// moduleErrors = true; -// logger.log(Level.WARNING, "Module class: " + location -// + " does not implement correct interface: " + moduleMeta.getName() -// + " required for pipeline: " + pType.toString() -// + ", module will not be active."); -// } -// } else { -// moduleErrors = true; -// logger.log(Level.WARNING, "Module class: " + location + " does not implement any interface, module will not be active."); -// } -// -// //if file module: check if has public static getDefault() -// if (pType == IngestModuleLoader.IngestPipelineXMLDescriptor.PIPELINE_TYPE.FILE_ANALYSIS) { -// try { -// Method getDefaultMethod = moduleClass.getMethod("getDefault"); -// int modifiers = getDefaultMethod.getModifiers(); -// if (!(Modifier.isPublic(modifiers) && Modifier.isStatic(modifiers))) { -// moduleErrors = true; -// logger.log(Level.WARNING, "Module class: " + location + " does not implement public static getDefault() singleton method."); -// } -// if (!getDefaultMethod.getReturnType().equals(moduleClass)) { -// logger.log(Level.WARNING, "Module class: " + location + " getDefault() singleton method should return the module class instance: " + moduleClass.getName()); -// } -// -// } catch (NoSuchMethodException ex) { -// 
Exceptions.printStackTrace(ex); -// } catch (SecurityException ex) { -// Exceptions.printStackTrace(ex); -// } -// } //if data source module: check if has public constructor with no args -// else if (pType == IngestModuleLoader.IngestPipelineXMLDescriptor.PIPELINE_TYPE.DATA_SOURCE_ANALYSIS) { -// try { -// Constructor constr = moduleClass.getConstructor(); -// int modifiers = constr.getModifiers(); -// if (!Modifier.isPublic(modifiers)) { -// moduleErrors = true; -// logger.log(Level.WARNING, "Module class: " + location + " lacks a public default constructor."); -// } -// } catch (NoSuchMethodException ex) { -// moduleErrors = true; -// logger.log(Level.WARNING, "Module class: " + location + " lacks a public default constructor."); -// } catch (SecurityException ex) { -// moduleErrors = true; -// logger.log(Level.WARNING, "Module class: " + location + " lacks a public default constructor."); -// } -// } -// -// } catch (ClassNotFoundException ex) { -// moduleErrors = true; -// logger.log(Level.WARNING, "Module class: " + location + " not found, module will not be active."); -// -// } catch (LinkageError le) { -// moduleErrors = true; -// logger.log(Level.WARNING, "Module class: " + location + " has unresolved symbols, module will not be active.", le); -// } -// -// -// //validate ordering -// for (int o : orderings.keySet()) { -// int count = orderings.get(o); -// if (count > 1) { -// pipelineErrors = true; -// logger.log(Level.SEVERE, "Pipeline " + pipelineType + " invalid non-unique ordering of modules, order: " + o); -// } -// } -// -// pMod.valid = !moduleErrors; -// logger.log(Level.INFO, "Module " + pMod.location + " valid: " + pMod.valid); -// } //end module -// -// pRaw.valid = !pipelineErrors; -// logger.log(Level.INFO, "Pipeline " + pType + " valid: " + pRaw.valid); -// } //end pipeline -// -// } - -// private Set getJarPaths(String modulesDir) { -// Set urls = new HashSet(); -// -// final File modulesDirF = new File(modulesDir); -// FilenameFilter jarFilter = new FilenameFilter() { -// @Override -// public boolean accept(File dir, String name) { -// return dir.equals(modulesDirF) && name.endsWith(".jar"); -// } -// }; -// File[] dirJars = modulesDirF.listFiles(jarFilter); -// if (dirJars != null) { -// //modules dir exists -// for (int i = 0; i < dirJars.length; ++i) { -// String urlPath = "file:/" + dirJars[i].getAbsolutePath(); -// try { -// urlPath = URLDecoder.decode(urlPath, ENCODING); -// } catch (UnsupportedEncodingException ex) { -// logger.log(Level.SEVERE, "Could not decode file path. 
", ex); -// } -// -// try { -// urls.add(new URL(urlPath)); -// //logger.log(Level.INFO, "JAR: " + urlPath); -// } catch (MalformedURLException ex) { -// logger.log(Level.WARNING, "Invalid URL: " + urlPath, ex); -// } -// } -// } -// -// /* -// * netbeans way, but not public API -// org.openide.filesystems.Repository defaultRepository = Repository.getDefault(); -// FileSystem masterFilesystem = defaultRepository.getDefaultFileSystem(); -// org.netbeans.core.startup.ModuleSystem moduleSystem = new org.netbeans.core.startup.ModuleSystem(masterFilesystem); -// List jars = moduleSystem.getModuleJars(); -// for (File jar : jars) { -// logger.log(Level.INFO, " JAR2: " + jar.getAbsolutePath()); -// } -// //org.netbeans.ModuleManager moduleManager = moduleSystem.getManager(); -// */ -// -// return urls; -// } - - /** - * Get jar paths of autodiscovered modules - * - * @param moduleInfos to look into to discover module jar paths - * @return - */ -// private Set getJarPaths(Collection moduleInfos) { -// Set urls = new HashSet(); -// -// //TODO lookup module jar file paths by "seed" class or resource, using the module loader -// //problem: we don't have a reliable "seed" class in every moduke -// //and loading by Bundle.properties resource does not seem to work with the module class loader -// //for now hardcoding jar file locations -// -// /* -// for (ModuleInfo moduleInfo : moduleInfos) { -// -// if (moduleInfo.isEnabled() == false) { -// continue; -// } -// -// String basePackageName = moduleInfo.getCodeNameBase(); -// if (basePackageName.startsWith("org.netbeans") -// || basePackageName.startsWith("org.openide")) { -// //skip -// continue; -// } -// -// -// ClassLoader moduleClassLoader = moduleInfo.getClassLoader(); -// -// URL modURL = moduleClassLoader.getResource(basePackageName); -// logger.log(Level.INFO, "GOT MOD URL1 : " + modURL); -// -// modURL = moduleClassLoader.getParent().getResource(basePackageName); -// logger.log(Level.INFO, "GOT MOD URL2 : " + modURL); -// -// modURL = classLoader.getResource(basePackageName); -// logger.log(Level.INFO, "GOT MOD URL3 : " + modURL); -// } */ -// /* -// URL modURL = moduleClassLoader.getParent().getResource("Bundle.properties"); -// //URL modURL = classLoader.getResource(basePackageName); -// logger.log(Level.INFO, "GOT MOD URL : " + modURL); -// -// modURL = moduleClassLoader.getResource(basePackageName + ".Bundle.properties"); -// //URL modURL = classLoader.getResource(basePackageName); -// logger.log(Level.INFO, "GOT MOD URL : " + modURL); -// -// modURL = moduleClassLoader.getResource("Bundle"); -// //URL modURL = classLoader.getResource(basePackageName); -// logger.log(Level.INFO, "GOT MOD URL : " + modURL); -// -// Class modClass; -// try { -// modClass = classLoader.loadClass(basePackageName + ".Installer"); -// URL modURL2 = modClass.getProtectionDomain().getCodeSource().getLocation(); -// logger.log(Level.INFO, "GOT MOD URL2 : " + modURL2); -// } catch (ClassNotFoundException ex) { -// // Exceptions.printStackTrace(ex); -// } -// try { -// Class moduleBundleClass = -// Class.forName(basePackageName, false, classLoader); -// URL modURL3 = moduleBundleClass.getProtectionDomain().getCodeSource().getLocation(); -// logger.log(Level.INFO, "GOT MOD URL3 : " + modURL3); -// } catch (ClassNotFoundException ex) { -// // Exceptions.printStackTrace(ex); -// } -// -// -// URL urltry; -// try { -// urltry = moduleClassLoader.loadClass("Bundle").getProtectionDomain().getCodeSource().getLocation(); -// logger.log(Level.INFO, "GOT TRY URL : " + urltry); 
-// } catch (ClassNotFoundException ex) { -// // Exceptions.printStackTrace(ex); -// } -// -// } -// * */ -// -// //core modules -// urls.addAll(getJarPaths(PlatformUtil.getInstallModulesPath())); -// -// //user modules -// urls.addAll(getJarPaths(PlatformUtil.getUserModulesPath())); -// -// // add other project dirs, such as from external modules -// for (String projectDir : PlatformUtil.getProjectsDirs()) { -// File modules = new File(projectDir + File.separator + "modules"); -// if (modules.exists()) { -// urls.addAll(getJarPaths(modules.getAbsolutePath())); -// } -// } -// -// -// -// return urls; -// } - - List getIngestModuleFactories() { - return moduleFactories; - } - - /** - * Auto-discover ingest modules in all platform modules that are "enabled" - * If discovered ingest module is not already in XML config, add it do - * config and add to in-memory pipeline. - * - * @throws IngestModuleLoaderException - */ -// @SuppressWarnings("unchecked") - private void autodiscover() /*throws IngestModuleLoaderException*/ { - + private void init() { + // RJCTODO: Add code to listen to changes in the collections, possibly restore listener code... + // RJCTODO: Since we were going to overwrite pipeline config every time and we are going to move the code modules + // into this package, we can simply handle the module ordering here, possibly just directly instantiating the core + // modules. + Logger logger = Logger.getLogger(IngestModuleLoader.class.getName()); Collection factories = Lookup.getDefault().lookupAll(IngestModuleFactory.class); for (IngestModuleFactory factory : factories) { logger.log(Level.INFO, "Loaded ingest module factory: name = {0}, version = {1}", new Object[]{factory.getModuleDisplayName(), factory.getModuleVersionNumber()}); moduleFactories.add(factory); - } - -// // Use Lookup to find the other NBM modules. We'll later search them for ingest modules -// Collection moduleInfos = Lookup.getDefault().lookupAll(ModuleInfo.class); -// logger.log(Level.INFO, "Autodiscovery, found #platform modules: " + moduleInfos.size()); -// -// Set urls = getJarPaths(moduleInfos); -// ArrayList reflectionsSet = new ArrayList<>(); -// -// for (final ModuleInfo moduleInfo : moduleInfos) { -// if (moduleInfo.isEnabled()) { -// /* NOTE: We have an assumption here that the modules in an NBM will -// * have the same package name as the NBM name. This means that -// * an NBM can have only one package with modules in it. */ -// String basePackageName = moduleInfo.getCodeNameBase(); -// -// // skip the standard ones -// if (basePackageName.startsWith("org.netbeans") -// || basePackageName.startsWith("org.openide")) { -// continue; -// } -// -// logger.log(Level.INFO, "Found module: " + moduleInfo.getDisplayName() + " " + basePackageName -// + " Build version: " + moduleInfo.getBuildVersion() -// + " Spec version: " + moduleInfo.getSpecificationVersion() -// + " Impl version: " + moduleInfo.getImplementationVersion()); -// -// ConfigurationBuilder cb = new ConfigurationBuilder(); -// cb.filterInputsBy(new FilterBuilder().include(FilterBuilder.prefix(basePackageName))); -// cb.setUrls(urls); -// cb.setScanners(new SubTypesScanner(), new ResourcesScanner()); -// reflectionsSet.add(new Reflections(cb)); -// } -// else { -// // log if we have our own modules disabled -// if (moduleInfo.getCodeNameBase().startsWith("org.sleuthkit")) { -// logger.log(Level.WARNING, "Sleuth Kit Module not enabled: " + moduleInfo.getDisplayName()); -// } -// } -// } - - /* This area is used to load the example modules. 
They are not found via lookup since they - * are in this NBM module. - * Uncomment this section to rum the examples. - */ - /* - ConfigurationBuilder cb = new ConfigurationBuilder(); - cb.filterInputsBy(new FilterBuilder().include(FilterBuilder.prefix("org.sleuthkit.autopsy.examples"))); - cb.setUrls(urls); - cb.setScanners(new SubTypesScanner(), new ResourcesScanner()); - reflectionsSet.add(new Reflections(cb)); - */ - -// for (Reflections reflections : reflectionsSet) { -// -// Set fileModules = reflections.getSubTypesOf(IngestModuleAbstractFile.class); -// Iterator it = fileModules.iterator(); -// while (it.hasNext()) { -// logger.log(Level.INFO, "Found file ingest module in: " + reflections.getClass().getSimpleName() + ": " + it.next().toString()); -// } -// -// Set dataSourceModules = reflections.getSubTypesOf(IngestModuleDataSource.class); -// it = dataSourceModules.iterator(); -// while (it.hasNext()) { -// logger.log(Level.INFO, "Found DataSource ingest module in: " + reflections.getClass().getSimpleName() + ": " + it.next().toString()); -// } -// -// if ((fileModules.isEmpty()) && (dataSourceModules.isEmpty())) { -// logger.log(Level.INFO, "Module has no ingest modules: " + reflections.getClass().getSimpleName()); -// continue; -// } -// -// //find out which modules to add -// //TODO check which modules to remove (which modules were uninstalled) -// boolean modulesChanged = false; -// -// it = fileModules.iterator(); -// while (it.hasNext()) { -// boolean exists = false; -// Class foundClass = (Class) it.next(); -// -// for (IngestModuleLoader.XmlPipelineRaw rawP : pipelinesXML) { -// if (!rawP.type.equals(IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.FILE_ANALYSIS.toString())) { -// continue; //skip -// } -// -// for (IngestModuleLoader.XmlModuleRaw rawM : rawP.modules) { -// //logger.log(Level.INFO, "CLASS NAME : " + foundClass.getName()); -// if (foundClass.getName().equals(rawM.location)) { -// exists = true; -// break; -// } -// } -// if (exists == true) { -// break; -// } -// } -// -// if (exists == false) { -// logger.log(Level.INFO, "Discovered a new file module to load: " + foundClass.getName()); -// //ADD MODULE -// addModuleToRawPipeline(foundClass, IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.FILE_ANALYSIS); -// modulesChanged = true; -// } -// -// } -// -// it = dataSourceModules.iterator(); -// while (it.hasNext()) { -// boolean exists = false; -// Class foundClass = (Class) it.next(); -// -// for (IngestModuleLoader.XmlPipelineRaw rawP : pipelinesXML) { -// if (!rawP.type.equals(IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.DATA_SOURCE_ANALYSIS.toString())) { -// continue; //skip -// } -// -// -// for (IngestModuleLoader.XmlModuleRaw rawM : rawP.modules) { -// //logger.log(Level.INFO, "CLASS NAME : " + foundClass.getName()); -// if (foundClass.getName().equals(rawM.location)) { -// exists = true; -// break; -// } -// } -// if (exists == true) { -// break; -// } -// } -// -// if (exists == false) { -// logger.log(Level.INFO, "Discovered a new DataSource module to load: " + foundClass.getName()); -// //ADD MODULE -// addModuleToRawPipeline(foundClass, IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.DATA_SOURCE_ANALYSIS); -// modulesChanged = true; -// } -// -// } -// -// if (modulesChanged) { -// save(); -// -// try { -// pcs.firePropertyChange(IngestModuleLoader.Event.ModulesReloaded.toString(), 0, 1); -// } -// catch (Exception e) { -// logger.log(Level.SEVERE, "IngestModuleLoader listener threw exception", e); -// MessageNotifyUtil.Notify.show("Module 
Error", "A module caused an error listening to IngestModuleLoader updates. See log to determine which module. Some data could be incomplete.", MessageNotifyUtil.MessageType.ERROR); -// } -// } -// } + } } - /** - * Set a new order of the module - * - * @param pipeLineType pipeline type where the module to reorder is present - * @param moduleLocation loaded module name (location), fully qualified - * class path - * @param newOrder new order to set - */ -// void setModuleOrder(IngestModuleLoader.IngestPipelineXMLDescriptor.PIPELINE_TYPE pipeLineType, String moduleLocation, int newOrder) throws IngestModuleLoaderException { -// throw new IngestModuleLoaderException("Not yet implemented"); -// } - - /** - * add autodiscovered module to raw pipeline to be validated and - * instantiated - * - * @param moduleClass - * @param pipelineType - */ -// private void addModuleToRawPipeline(Class moduleClass, IngestModuleLoader.IngestPipelineXMLDescriptor.PIPELINE_TYPE pipelineType) throws IngestModuleLoaderException { -// String moduleLocation = moduleClass.getName(); -// -// IngestModuleLoader.IngestModuleXMLDescriptor modRaw = new IngestModuleLoader.IngestModuleXMLDescriptor(); -// modRaw.arguments = ""; //default, no arguments -// modRaw.location = moduleLocation; -// modRaw.order = Integer.MAX_VALUE - (numModDiscovered++); //add to end -// modRaw.type = IngestModuleLoader.IngestModuleXMLDescriptor.MODULE_TYPE.PLUGIN.toString(); -// modRaw.valid = false; //to be validated -// -// //save the current numModDiscovered -// ModuleSettings.setConfigSetting(IngestManager.MODULE_PROPERTIES, CUR_MODULES_DISCOVERED_SETTING, Integer.toString(numModDiscovered)); -// -// //find the pipeline of that type -// IngestModuleLoader.IngestPipelineXMLDescriptor pipeline = null; -// for (IngestModuleLoader.IngestPipelineXMLDescriptor rawP : this.pipelinesXML) { -// if (rawP.type.equals(pipelineType.toString())) { -// pipeline = rawP; -// break; -// } -// } -// if (pipeline == null) { -// throw new IngestModuleLoaderException("Could not find expected pipeline of type: " + pipelineType.toString() + ", cannot add autodiscovered module: " + moduleLocation); -// } else { -// pipeline.modules.add(modRaw); -// logger.log(Level.INFO, "Added a new module " + moduleClass.getName() + " to pipeline " + pipelineType.toString()); -// } -// } - - /** - * Register a listener for module install/uninstall //TODO ensure that - * module is actually loadable when Lookup event is fired - */ -// private void registerModulesChange() { -// final Lookup.Result result = -// Lookup.getDefault().lookupResult(ModuleInfo.class); -// result.addLookupListener(new LookupListener() { -// @Override -// public void resultChanged(LookupEvent event) { -// try { -// logger.log(Level.INFO, "Module change occured, reloading."); -// init(); -// } catch (IngestModuleLoaderException ex) { -// logger.log(Level.SEVERE, "Error reloading the module loader. 
", ex); -// } -// } -// }); -// } - - // RJCTODO: This is not used - /** - * Save the current in memory pipeline config, including autodiscovered - * modules - * - * @throws IngestModuleLoaderException - */ -// public void save() throws IngestModuleLoaderException { -// DocumentBuilderFactory dbfac = DocumentBuilderFactory.newInstance(); -// -// try { -// DocumentBuilder docBuilder = dbfac.newDocumentBuilder(); -// Document doc = docBuilder.newDocument(); -// -// -// Comment comment = doc.createComment("Saved by: " + getClass().getName() -// + " on: " + dateFormatter.format(System.currentTimeMillis())); -// doc.appendChild(comment); -// Element rootEl = doc.createElement(IngestModuleLoader.IngestPipelineXMLDescriptor.XML_PIPELINE_ROOT); -// doc.appendChild(rootEl); -// -// for (IngestModuleLoader.IngestPipelineXMLDescriptor rawP : this.pipelinesXML) { -// Element pipelineEl = doc.createElement(IngestModuleLoader.IngestPipelineXMLDescriptor.XML_PIPELINE_EL); -// pipelineEl.setAttribute(IngestModuleLoader.IngestPipelineXMLDescriptor.XML_PIPELINE_TYPE_ATTR, rawP.type); -// rootEl.appendChild(pipelineEl); -// -// for (IngestModuleLoader.IngestModuleXMLDescriptor rawM : rawP.modules) { -// Element moduleEl = doc.createElement(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_EL); -// -// moduleEl.setAttribute(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_LOC_ATTR, rawM.location); -// moduleEl.setAttribute(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_TYPE_ATTR, rawM.type); -// moduleEl.setAttribute(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_ORDER_ATTR, Integer.toString(rawM.order)); -// moduleEl.setAttribute(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_TYPE_ATTR, rawM.type); -// -// pipelineEl.appendChild(moduleEl); -// } -// } -// -// XMLUtil.saveDoc(IngestModuleLoader.class, absFilePath, ENCODING, doc); -// logger.log(Level.INFO, "Pipeline configuration saved to: " + this.absFilePath); -// } catch (ParserConfigurationException e) { -// logger.log(Level.SEVERE, "Error saving pipeline config XML: can't initialize parser.", e); -// } -// -// } - - /** - * Instantiate valid pipeline and modules and store the module object - * references - * - * @throws IngestModuleLoaderException - */ -// @SuppressWarnings("unchecked") - private void instantiate() /*throws IngestModuleLoaderException*/ { - - //clear current -// filePipeline.clear(); -// dataSourcePipeline.clear(); - - //add autodiscovered modules to pipelinesXML - autodiscover(); - - //validate all modules: from XML + just autodiscovered - -// validate(); -// -// for (IngestModuleLoader.XmlPipelineRaw pRaw : pipelinesXML) { -// if (pRaw.valid == false) { -// //skip invalid pipelines -// continue; -// } -// -// //sort modules by order parameter, in case XML order is different -// Collections.sort(pRaw.modules, new Comparator() { -// @Override -// public int compare(IngestModuleLoader.XmlModuleRaw o1, IngestModuleLoader.XmlModuleRaw o2) { -// return Integer.valueOf(o1.order).compareTo(Integer.valueOf(o2.order)); -// } -// }); -// -// //check pipelineType, add to right pipeline collection -// IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE pType = IngestModuleLoader.XmlPipelineRaw.getPipelineType(pRaw.type); -// -// for (IngestModuleLoader.XmlModuleRaw pMod : pRaw.modules) { -// try { -// if (pMod.valid == false) { -// //skip invalid modules -// continue; -// } -// -// //add to right pipeline -// switch (pType) { -// case FILE_ANALYSIS: -// IngestModuleAbstractFile fileModuleInstance = null; -// 
final Class fileModuleClass = -// (Class) Class.forName(pMod.location, true, classLoader); -// try { -// Method getDefaultMethod = fileModuleClass.getMethod("getDefault"); -// if (getDefaultMethod != null) { -// fileModuleInstance = (IngestModuleAbstractFile) getDefaultMethod.invoke(null); -// } -// } catch (NoSuchMethodException ex) { -// logger.log(Level.WARNING, "Validated module, but not public getDefault() found: " + pMod.location); -// pMod.valid = false; //prevent from trying to load again -// } catch (SecurityException ex) { -// logger.log(Level.WARNING, "Validated module, but not public getDefault() found: " + pMod.location); -// pMod.valid = false; //prevent from trying to load again -// } catch (IllegalAccessException ex) { -// logger.log(Level.WARNING, "Validated module, but not public getDefault() found: " + pMod.location); -// pMod.valid = false; //prevent from trying to load again -// } catch (InvocationTargetException ex) { -// logger.log(Level.WARNING, "Validated module, but not public getDefault() found: " + pMod.location); -// pMod.valid = false; //prevent from trying to load again -// } -// -// filePipeline.add(fileModuleInstance); -// break; -// case DATA_SOURCE_ANALYSIS: -// final Class dataSourceModuleClass = -// (Class) Class.forName(pMod.location, true, classLoader); -// -// try { -// Constructor constr = dataSourceModuleClass.getConstructor(); -// IngestModuleDataSource dataSourceModuleInstance = constr.newInstance(); -// -// if (dataSourceModuleInstance != null) { -// dataSourcePipeline.add(dataSourceModuleInstance); -// } -// -// } catch (NoSuchMethodException | SecurityException | InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException ex) { -// logger.log(Level.WARNING, "Validated module, could not initialize, check for bugs in the module: " + pMod.location, ex); -// pMod.valid = false; -// } -// -// -// break; -// default: -// logger.log(Level.SEVERE, "Unexpected pipeline type to add module to: " + pType); -// } -// -// -// } catch (ClassNotFoundException ex) { -// logger.log(Level.SEVERE, "Validated module, but could not load (shouldn't happen): " + pMod.location); -// } -// } -// -// } //end instantiating modules in XML - } - - /** - * Get a new instance of the module or null if could not be created - * - * @param module existing module to get an instance of - * @return new module instance or null if could not be created - */ -// IngestModuleAbstract getNewIngestModuleInstance(IngestModuleAbstract module) { -// try { -// IngestModuleAbstract newInstance = module.getClass().newInstance(); -// return newInstance; -// } catch (InstantiationException e) { -// logger.log(Level.SEVERE, "Cannot instantiate module: " + module.getName(), e); -// return null; -// } catch (IllegalAccessException e) { -// logger.log(Level.SEVERE, "Cannot instantiate module: " + module.getName(), e); -// return null; -// } -// -// } - -// private IngestModuleAbstract getNewIngestModuleInstance(Class moduleClass) { -// try { -// IngestModuleAbstract newInstance = moduleClass.newInstance(); -// return newInstance; -// } catch (InstantiationException e) { -// logger.log(Level.SEVERE, "Cannot instantiate module: " + moduleClass.getName(), e); -// return null; -// } catch (IllegalAccessException e) { -// logger.log(Level.SEVERE, "Cannot instantiate module: " + moduleClass.getName(), e); -// return null; -// } -// -// } - - /** - * Load XML into raw pipeline representation - * - * @throws IngestModuleLoaderException - */ -// private void 
loadRawPipeline() throws IngestModuleLoaderException { -// final Document doc = XMLUtil.loadDoc(IngestModuleLoader.class, absFilePath, XSDFILE); -// if (doc == null) { -// throw new IngestModuleLoaderException("Could not load pipeline config XML: " + this.absFilePath); -// } -// Element root = doc.getDocumentElement(); -// if (root == null) { -// String msg = "Error loading pipeline configuration: invalid file format."; -// logger.log(Level.SEVERE, msg); -// throw new IngestModuleLoaderException(msg); -// } -// NodeList pipelineNodes = root.getElementsByTagName(IngestModuleLoader.IngestPipelineXMLDescriptor.XML_PIPELINE_EL); -// int numPipelines = pipelineNodes.getLength(); -// if (numPipelines == 0) { -// throw new IngestModuleLoaderException("No pipelines found in the pipeline configuration: " + absFilePath); -// } -// for (int pipelineNum = 0; pipelineNum < numPipelines; ++pipelineNum) { -// //process pipelines -// Element pipelineEl = (Element) pipelineNodes.item(pipelineNum); -// final String pipelineType = pipelineEl.getAttribute(IngestModuleLoader.IngestPipelineXMLDescriptor.XML_PIPELINE_TYPE_ATTR); -// logger.log(Level.INFO, "Found pipeline type: " + pipelineType); -// -// IngestModuleLoader.IngestPipelineXMLDescriptor pipelineRaw = new IngestModuleLoader.IngestPipelineXMLDescriptor(); -// pipelineRaw.type = pipelineType; -// this.pipelinesXML.add(pipelineRaw); -// -// //process modules -// NodeList modulesNodes = pipelineEl.getElementsByTagName(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_EL); -// int numModules = modulesNodes.getLength(); -// if (numModules == 0) { -// logger.log(Level.WARNING, "Pipeline: " + pipelineType + " has no modules defined."); -// } -// for (int moduleNum = 0; moduleNum < numModules; ++moduleNum) { -// //process modules -// Element moduleEl = (Element) modulesNodes.item(moduleNum); -// final String moduleType = moduleEl.getAttribute(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_TYPE_ATTR); -// final String moduleOrder = moduleEl.getAttribute(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_ORDER_ATTR); -// final String moduleLoc = moduleEl.getAttribute(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_LOC_ATTR); -// final String moduleArgs = moduleEl.getAttribute(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_ARGS_ATTR); -// IngestModuleLoader.IngestModuleXMLDescriptor module = new IngestModuleLoader.IngestModuleXMLDescriptor(); -// module.arguments = moduleArgs; -// module.location = moduleLoc; -// try { -// module.order = Integer.parseInt(moduleOrder); -// } catch (NumberFormatException e) { -// logger.log(Level.WARNING, "Invalid module order, need integer: " + moduleOrder + ", adding to end of the list"); -// module.order = Integer.MAX_VALUE - (numModDiscovered++); -// //save the current numModDiscovered -// ModuleSettings.setConfigSetting(IngestManager.MODULE_PROPERTIES, CUR_MODULES_DISCOVERED_SETTING, Integer.toString(numModDiscovered)); -// -// } -// module.type = moduleType; -// pipelineRaw.modules.add(module); -// } -// -// } -// -// } - - /** - * Load and validate XML pipeline, autodiscover and instantiate the pipeline - * modules Can be called multiple times to refresh the view of modules - * - * @throws IngestModuleLoaderException - */ - public synchronized void init() /*throws IngestModuleLoaderException*/ { -// absFilePath = PlatformUtil.getUserConfigDirectory() + File.separator + PIPELINE_CONFIG_XML; -// ClassLoader parentClassLoader = Lookup.getDefault().lookup(ClassLoader.class); -// classLoader 
= new CustomClassLoader(parentClassLoader); -// -// try { -// boolean extracted = PlatformUtil.extractResourceToUserConfigDir(IngestModuleLoader.class, PIPELINE_CONFIG_XML); -// } catch (IOException ex) { -// logger.log(Level.SEVERE, "Error copying default pipeline configuration to user dir ", ex); -// } -// -// //load the pipeline config -// loadRawPipeline(); - - instantiate(); - - - } - -// private static final class IngestPipelineXMLDescriptor { -// -// enum PIPELINE_TYPE implements IngestModuleMapping { -// -// FILE_ANALYSIS { -// @Override -// public String toString() { -// return "FileAnalysis"; -// } -// -// @Override -// public Class getIngestModuleInterface() { -// return IngestModuleAbstractFile.class; -// } -// }, -// DATA_SOURCE_ANALYSIS { -// @Override -// public String toString() { -// return "ImageAnalysis"; -// } -// -// @Override -// public Class getIngestModuleInterface() { -// return IngestModuleDataSource.class; -// } -// },; -// } -// -// /** -// * get pipeline type for string mapping to type toString() method -// * -// * @param s string equals to one of the types toString() representation -// * @return matching type -// */ -// static IngestModuleLoader.IngestPipelineXMLDescriptor.PIPELINE_TYPE getPipelineType(String s) throws IllegalArgumentException { -// IngestModuleLoader.IngestPipelineXMLDescriptor.PIPELINE_TYPE[] types = IngestModuleLoader.IngestPipelineXMLDescriptor.PIPELINE_TYPE.values(); -// for (int i = 0; i < types.length; ++i) { -// if (types[i].toString().equals(s)) { -// return types[i]; -// } -// } -// throw new IllegalArgumentException("No PIPELINE_TYPE for string: " + s); -// } -// private static final String XML_PIPELINE_ROOT = "PIPELINE_CONFIG"; -// private static final String XML_PIPELINE_EL = "PIPELINE"; -// private static final String XML_PIPELINE_TYPE_ATTR = "type"; -// String type; -// List modules = new ArrayList<>(); -// boolean valid = false; // if passed validation -// } -// -// private static class IngestModuleXMLDescriptor { -// -// enum MODULE_TYPE { -// PLUGIN { -// @Override -// public String toString() { -// return "plugin"; -// } -// }; -// } -// -// private static final String XML_MODULE_EL = "MODULE"; -// private static final String XML_MODULE_ORDER_ATTR = "order"; -// private static final String XML_MODULE_TYPE_ATTR = "type"; -// private static final String XML_MODULE_LOC_ATTR = "location"; -// private static final String XML_MODULE_ARGS_ATTR = "arguments"; -// int order; -// String type; -// String location; -// String arguments; -// boolean valid = false; // if passed validation -// } -//} - -/** - * Exception thrown when errors occur while loading modules - */ -//class IngestModuleLoaderException extends Throwable { -// -// public IngestModuleLoaderException(String message) { -// super(message); -// } -// -// public IngestModuleLoaderException(String message, Throwable cause) { -// super(message, cause); -// } -//} - -/** - * Implements mapping of a type to ingest module interface type - */ -//interface IngestModuleMapping { -// -// /** -// * Get ingest module interface mapped to that type -// * -// * @return ingest module interface meta type -// */ -// public Class getIngestModuleInterface(); -//} - -/** - * Custom class loader that attempts to force class resolution / linkage validation at loading - */ -//class CustomClassLoader extends ClassLoader { -// private static final Logger logger = Logger.getLogger(CustomClassLoader.class.getName()); -// -// CustomClassLoader(ClassLoader parent) { -// super(parent); -// } -// -// -// 
@Override
-//    public Class loadClass(String name) throws ClassNotFoundException {
-//        logger.log(Level.INFO, "Custom loading class: " + name);
-//
-//        Class cl = super.loadClass(name, true);
-//
-//        return cl;
-//    }
-}
+    List<IngestModuleFactory> getIngestModuleFactories() {
+        return moduleFactories;
+    }
+}
\ No newline at end of file
diff --git a/ExifParser/nbproject/project.xml b/ExifParser/nbproject/project.xml
index da91e0b898..15439ab1c2 100644
--- a/ExifParser/nbproject/project.xml
+++ b/ExifParser/nbproject/project.xml
@@ -6,6 +6,14 @@
             <code-name-base>org.sleuthkit.autopsy.exifparser</code-name-base>
             <suite-component/>
             <module-dependencies>
+                <dependency>
+                    <code-name-base>org.openide.util.lookup</code-name-base>
+                    <build-prerequisite/>
+                    <compile-dependency/>
+                    <run-dependency>
+                        <specification-version>8.19.1</specification-version>
+                    </run-dependency>
+                </dependency>
                 <dependency>
                     <code-name-base>org.sleuthkit.autopsy.core</code-name-base>
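The new getIngestModuleFactories() accessor above, the org.openide.util.lookup dependency added to project.xml here, and the @ServiceProvider registrations on the factories introduced later in this patch all point to the NetBeans Lookup API as the discovery mechanism. A rough sketch of how the loader could collect the registered factories follows; the class is hypothetical, and only Lookup plus the IngestModuleFactory type from this patch are assumed.

// Sketch only, not part of this patch: collect every IngestModuleFactory
// registered via @ServiceProvider(service = IngestModuleFactory.class).
import java.util.ArrayList;
import java.util.List;
import org.openide.util.Lookup;
import org.sleuthkit.autopsy.ingest.IngestModuleFactory;

class FactoryDiscoverySketch {
    private final List<IngestModuleFactory> moduleFactories = new ArrayList<>();

    void discoverFactories() {
        // lookupAll() returns one instance of each registered implementation.
        for (IngestModuleFactory factory : Lookup.getDefault().lookupAll(IngestModuleFactory.class)) {
            moduleFactories.add(factory);
        }
    }
}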
diff --git a/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserFileIngestModule.java b/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserFileIngestModule.java
index 49bc81e4ea..036b6be06c 100644
--- a/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserFileIngestModule.java
+++ b/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserFileIngestModule.java
@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2011-2013 Basis Technology Corp.
+ * Copyright 2011-2014 Basis Technology Corp.
  * Contact: carrier sleuthkit org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,229 +16,193 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-//package org.sleuthkit.autopsy.exifparser;
-//
-//import com.drew.imaging.ImageMetadataReader;
-//import com.drew.imaging.ImageProcessingException;
-//import com.drew.lang.GeoLocation;
-//import com.drew.lang.Rational;
-//import com.drew.metadata.Metadata;
-//import com.drew.metadata.exif.ExifIFD0Directory;
-//import com.drew.metadata.exif.ExifSubIFDDirectory;
-//import com.drew.metadata.exif.GpsDirectory;
-//import java.io.BufferedInputStream;
-//import java.io.IOException;
-//import java.io.InputStream;
-//import java.util.ArrayList;
-//import java.util.Collection;
-//import java.util.Date;
-//import java.util.logging.Level;
-//import org.sleuthkit.autopsy.coreutils.ImageUtils;
-//import org.sleuthkit.autopsy.coreutils.Logger;
-//import org.sleuthkit.autopsy.coreutils.Version;
-//import org.sleuthkit.autopsy.ingest.IngestServices;
-//import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile;
-//import org.sleuthkit.autopsy.ingest.IngestModuleInit;
-//import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
-//import org.sleuthkit.datamodel.AbstractFile;
-//import org.sleuthkit.datamodel.BlackboardArtifact;
-//import org.sleuthkit.datamodel.BlackboardAttribute;
-//import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
-//import org.sleuthkit.datamodel.ReadContentInputStream;
-//import org.sleuthkit.datamodel.TskCoreException;
-//import org.sleuthkit.datamodel.TskData;
-//import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
+
+package org.sleuthkit.autopsy.exifparser;
+
+import com.drew.imaging.ImageMetadataReader;
+import com.drew.imaging.ImageProcessingException;
+import com.drew.lang.GeoLocation;
+import com.drew.lang.Rational;
+import com.drew.metadata.Metadata;
+import com.drew.metadata.exif.ExifIFD0Directory;
+import com.drew.metadata.exif.ExifSubIFDDirectory;
+import com.drew.metadata.exif.GpsDirectory;
+import java.io.BufferedInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Date;
+import java.util.logging.Level;
+import org.sleuthkit.autopsy.coreutils.ImageUtils;
+import org.sleuthkit.autopsy.coreutils.Logger;
+import org.sleuthkit.autopsy.ingest.FileIngestModule;
+import org.sleuthkit.autopsy.ingest.IngestServices;
+import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
+import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.BlackboardArtifact;
+import org.sleuthkit.datamodel.BlackboardAttribute;
+import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
+import org.sleuthkit.datamodel.ReadContentInputStream;
+import org.sleuthkit.datamodel.TskCoreException;
+import org.sleuthkit.datamodel.TskData;
+import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
 
 /**
  * Ingest module to parse image Exif metadata. Currently only supports JPEG
  * files. Ingests an image file and, if available, adds its date, latitude,
  * longitude, altitude, device model, and device make to a blackboard artifact.
  */
-//public final class ExifParserFileIngestModule extends IngestModuleAbstractFile {
-//
-//    private IngestServices services;
-//    final public static String MODULE_NAME = "Exif Parser";
-//    final public static String MODULE_VERSION = Version.getVersion();
-//    private static final Logger logger = Logger.getLogger(ExifParserFileIngestModule.class.getName());
-//    private static ExifParserFileIngestModule defaultInstance = null;
-//    private int filesProcessed = 0;
-//    private boolean filesToFire = false;
-//
-//    //file ingest modules require a private constructor
-//    //to ensure singleton instances
-//    private ExifParserFileIngestModule() {
-//    }
-//
-//    //default instance used for module registration
-//    public static synchronized ExifParserFileIngestModule getDefault() {
-//        if (defaultInstance == null) {
-//            defaultInstance = new ExifParserFileIngestModule();
-//        }
-//        return defaultInstance;
-//    }
-//
-//    @Override
-//    public IngestModuleAbstractFile.ProcessResult process(PipelineContext<IngestModuleAbstractFile> pipelineContext, AbstractFile content) {
-//
-//        //skip unalloc
-//        if (content.getType().equals(TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) {
-//            return IngestModuleAbstractFile.ProcessResult.OK;
-//        }
-//
-//        // skip known
-//        if (content.getKnown().equals(TskData.FileKnown.KNOWN)) {
-//            return IngestModuleAbstractFile.ProcessResult.OK;
-//        }
-//
-//        // update the tree every 1000 files if we have EXIF data that is not being being displayed
-//        filesProcessed++;
-//        if ((filesToFire) && (filesProcessed % 1000 == 0)) {
-//            services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF));
-//            filesToFire = false;
-//        }
-//
-//        //skip unsupported
-//        if (!parsableFormat(content)) {
-//            return IngestModuleAbstractFile.ProcessResult.OK;
-//        }
-//
-//        return processFile(content);
-//    }
-//
-//    public IngestModuleAbstractFile.ProcessResult processFile(AbstractFile f) {
-//        InputStream in = null;
-//        BufferedInputStream bin = null;
-//
-//        try {
-//            in = new ReadContentInputStream(f);
-//            bin = new BufferedInputStream(in);
-//
-//            Collection<BlackboardAttribute> attributes = new ArrayList<BlackboardAttribute>();
-//            Metadata metadata = ImageMetadataReader.readMetadata(bin, true);
-//
-//            // Date
-//            ExifSubIFDDirectory exifDir = metadata.getDirectory(ExifSubIFDDirectory.class);
-//            if (exifDir != null) {
-//                Date date = exifDir.getDate(ExifSubIFDDirectory.TAG_DATETIME_ORIGINAL);
-//                if (date != null) {
-//                    attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID(), MODULE_NAME, date.getTime() / 1000));
-//                }
-//            }
-//
-//            // GPS Stuff
-//            GpsDirectory gpsDir = metadata.getDirectory(GpsDirectory.class);
-//            if (gpsDir != null) {
-// GeoLocation loc = gpsDir.getGeoLocation(); -// if (loc != null) { -// double latitude = loc.getLatitude(); -// double longitude = loc.getLongitude(); -// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID(), MODULE_NAME, latitude)); -// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID(), MODULE_NAME, longitude)); -// } -// -// Rational altitude = gpsDir.getRational(GpsDirectory.TAG_GPS_ALTITUDE); -// if (altitude != null) { -// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID(), MODULE_NAME, altitude.doubleValue())); -// } -// } -// -// // Device info -// ExifIFD0Directory devDir = metadata.getDirectory(ExifIFD0Directory.class); -// if (devDir != null) { -// String model = devDir.getString(ExifIFD0Directory.TAG_MODEL); -// if (model != null && !model.isEmpty()) { -// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL.getTypeID(), MODULE_NAME, model)); -// } -// -// String make = devDir.getString(ExifIFD0Directory.TAG_MAKE); -// if (make != null && !make.isEmpty()) { -// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE.getTypeID(), MODULE_NAME, make)); -// } -// } -// -// // Add the attributes, if there are any, to a new artifact -// if (!attributes.isEmpty()) { -// BlackboardArtifact bba = f.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF); -// bba.addAttributes(attributes); -// filesToFire = true; -// } -// -// return IngestModuleAbstractFile.ProcessResult.OK; -// -// } catch (TskCoreException ex) { -// logger.log(Level.WARNING, "Failed to create blackboard artifact for exif metadata (" + ex.getLocalizedMessage() + ")."); -// } catch (ImageProcessingException ex) { -// logger.log(Level.WARNING, "Failed to process the image file: " + f.getParentPath() + "/" + f.getName() + "(" + ex.getLocalizedMessage() + ")"); -// } catch (IOException ex) { -// logger.log(Level.WARNING, "IOException when parsing image file: " + f.getParentPath() + "/" + f.getName(), ex); -// } finally { -// try { -// if (in != null) { -// in.close(); -// } -// if (bin != null) { -// bin.close(); -// } -// } catch (IOException ex) { -// logger.log(Level.WARNING, "Failed to close InputStream.", ex); -// } -// } -// -// // If we got here, there was an error -// return IngestModuleAbstractFile.ProcessResult.ERROR; -// } -// -// /** -// * Checks if should try to attempt to extract exif. 
Currently checks if JPEG
-// * image (by signature)
-// *
-// * @param f file to be checked
-// *
-// * @return true if to be processed
-// */
-//    private boolean parsableFormat(AbstractFile f) {
-//        return ImageUtils.isJpegFileHeader(f);
-//    }
-//
-//    @Override
-//    public void complete() {
-//        logger.log(Level.INFO, "completed exif parsing " + this.toString());
-//        if (filesToFire) {
-//            //send the final new data event
-//            services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF));
-//        }
-//    }
-//
-//    @Override
-//    public String getVersion() {
-//        return MODULE_VERSION;
-//    }
-//
-//    @Override
-//    public String getName() {
-//        return "Exif Image Parser";
-//    }
-//
-//    @Override
-//    public String getDescription() {
-//        return "Ingests JPEG files and retrieves their EXIF metadata.";
-//    }
-//
-//    @Override
-//    public void init(IngestModuleInit initContext) {
-//        services = IngestServices.getDefault();
-//        logger.log(Level.INFO, "init() " + this.toString());
-//
-//        filesProcessed = 0;
-//        filesToFire = false;
-//    }
-//
-//    @Override
-//    public void stop() {
-//    }
-//
-//    @Override
-//    public boolean hasBackgroundJobsRunning() {
-//        return false;
-//    }
-//}
\ No newline at end of file
+public final class ExifParserFileIngestModule implements FileIngestModule {
+
+    private IngestServices services;
+    private static final Logger logger = Logger.getLogger(ExifParserFileIngestModule.class.getName());
+    private int filesProcessed = 0;
+    private boolean filesToFire = false;
+
+    ExifParserFileIngestModule() {
+    }
+
+    @Override
+    public String getDisplayName() {
+        return ExifParserModuleFactory.getModuleName();
+    }
+
+    @Override
+    public void init(long taskId) {
+        services = IngestServices.getDefault();
+        logger.log(Level.INFO, "init() {0}", this.toString());
+        filesProcessed = 0;
+        filesToFire = false;
+    }
+
+    @Override
+    public void process(AbstractFile content) {
+
+        //skip unalloc
+        if (content.getType().equals(TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) {
+            return;
+        }
+
+        // skip known
+        if (content.getKnown().equals(TskData.FileKnown.KNOWN)) {
+            return;
+        }
+
+        // update the tree every 1000 files if we have EXIF data that is not being displayed
+        filesProcessed++;
+        if ((filesToFire) && (filesProcessed % 1000 == 0)) {
+            services.fireModuleDataEvent(new ModuleDataEvent(getDisplayName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF));
+            filesToFire = false;
+        }
+
+        //skip unsupported
+        if (!parsableFormat(content)) {
+            return;
+        }
+
+        processFile(content);
+    }
+
+    public void processFile(AbstractFile f) {
+        InputStream in = null;
+        BufferedInputStream bin = null;
+
+        try {
+            in = new ReadContentInputStream(f);
+            bin = new BufferedInputStream(in);
+
+            Collection<BlackboardAttribute> attributes = new ArrayList<>();
+            Metadata metadata = ImageMetadataReader.readMetadata(bin, true);
+
+            // Date
+            ExifSubIFDDirectory exifDir = metadata.getDirectory(ExifSubIFDDirectory.class);
+            if (exifDir != null) {
+                Date date = exifDir.getDate(ExifSubIFDDirectory.TAG_DATETIME_ORIGINAL);
+                if (date != null) {
+                    attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID(), getDisplayName(), date.getTime() / 1000));
+                }
+            }
+
+            // GPS Stuff
+            GpsDirectory gpsDir = metadata.getDirectory(GpsDirectory.class);
+            if (gpsDir != null) {
+                GeoLocation loc = gpsDir.getGeoLocation();
+                if (loc != null) {
+                    double latitude = loc.getLatitude();
+                    double longitude = loc.getLongitude();
+                    attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID(), getDisplayName(), latitude));
+                    attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID(), getDisplayName(), longitude));
+                }
+
+                Rational altitude = gpsDir.getRational(GpsDirectory.TAG_GPS_ALTITUDE);
+                if (altitude != null) {
+                    attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID(), getDisplayName(), altitude.doubleValue()));
+                }
+            }
+
+            // Device info
+            ExifIFD0Directory devDir = metadata.getDirectory(ExifIFD0Directory.class);
+            if (devDir != null) {
+                String model = devDir.getString(ExifIFD0Directory.TAG_MODEL);
+                if (model != null && !model.isEmpty()) {
+                    attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL.getTypeID(), getDisplayName(), model));
+                }
+
+                String make = devDir.getString(ExifIFD0Directory.TAG_MAKE);
+                if (make != null && !make.isEmpty()) {
+                    attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE.getTypeID(), getDisplayName(), make));
+                }
+            }
+
+            // Add the attributes, if there are any, to a new artifact
+            if (!attributes.isEmpty()) {
+                BlackboardArtifact bba = f.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF);
+                bba.addAttributes(attributes);
+                filesToFire = true;
+            }
+        } catch (TskCoreException ex) {
+            logger.log(Level.WARNING, "Failed to create blackboard artifact for exif metadata ({0}).", ex.getLocalizedMessage());
+        } catch (ImageProcessingException ex) {
+            logger.log(Level.WARNING, "Failed to process the image file: {0}/{1}({2})", new Object[]{f.getParentPath(), f.getName(), ex.getLocalizedMessage()});
+        } catch (IOException ex) {
+            logger.log(Level.WARNING, "IOException when parsing image file: " + f.getParentPath() + "/" + f.getName(), ex);
+        } finally {
+            try {
+                if (in != null) {
+                    in.close();
+                }
+                if (bin != null) {
+                    bin.close();
+                }
+            } catch (IOException ex) {
+                logger.log(Level.WARNING, "Failed to close InputStream.", ex);
+            }
+        }
+    }
+
+    /**
+     * Checks whether we should attempt to extract EXIF metadata. Currently
+     * checks whether the file is a JPEG image (by signature).
+     *
+     * @param f file to be checked
+     *
+     * @return true if the file should be processed
+     */
+    private boolean parsableFormat(AbstractFile f) {
+        return ImageUtils.isJpegFileHeader(f);
+    }
+
+    @Override
+    public void complete() {
+        logger.log(Level.INFO, "completed exif parsing {0}", this.toString());
+        if (filesToFire) {
+            //send the final new data event
+            services.fireModuleDataEvent(new ModuleDataEvent(getDisplayName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF));
+        }
+    }
+
+    @Override
+    public void stop() {
+    }
+}
\ No newline at end of file
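With the getDefault() singleton gone, the converted module above is a plain object whose lifecycle the ingest pipeline drives explicitly. A minimal sketch of that driver loop follows, using only the FileIngestModule methods exercised in this patch; the pipeline class, task id, file list, and cancellation flag are hypothetical.

// Sketch only, not part of this patch: driving a FileIngestModule through
// init/process/complete, with stop() used for early cancellation.
import java.util.List;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.datamodel.AbstractFile;

class FilePipelineSketch {
    void run(FileIngestModule module, long taskId, List<AbstractFile> files, boolean cancelled) {
        module.init(taskId);       // per-task setup (counters, IngestServices)
        for (AbstractFile file : files) {
            if (cancelled) {
                module.stop();     // interrupted: let the module release resources
                return;
            }
            module.process(file);  // errors are logged/posted, not returned
        }
        module.complete();         // final flush, e.g. the last ModuleDataEvent
    }
}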
diff --git a/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserModuleFactory.java b/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserModuleFactory.java
new file mode 100755
index 0000000000..9595171390
--- /dev/null
+++ b/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserModuleFactory.java
@@ -0,0 +1,62 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2014 Basis Technology Corp.
+ * Contact: carrier sleuthkit org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.sleuthkit.autopsy.exifparser;
+
+import java.io.Serializable;
+import org.openide.util.lookup.ServiceProvider;
+import org.sleuthkit.autopsy.coreutils.Version;
+import org.sleuthkit.autopsy.ingest.AbstractIngestModuleFactory;
+import org.sleuthkit.autopsy.ingest.FileIngestModule;
+import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
+
+/**
+ * A factory that creates file ingest modules that parse EXIF metadata.
+ */
+@ServiceProvider(service=IngestModuleFactory.class)
+public class ExifParserModuleFactory extends AbstractIngestModuleFactory {
+    @Override
+    public String getModuleDisplayName() {
+        return getModuleName();
+    }
+
+    static String getModuleName() {
+        return "Exif Image Parser";
+    }
+
+    @Override
+    public String getModuleDescription() {
+        return "Ingests JPEG files and retrieves their EXIF metadata.";
+    }
+
+    @Override
+    public String getModuleVersionNumber() {
+        return Version.getVersion();
+    }
+
+    @Override
+    public boolean isFileIngestModuleFactory() {
+        return true;
+    }
+
+    @Override
+    public FileIngestModule createFileIngestModule(Serializable ingestOptions) throws IngestModuleFactory.InvalidOptionsException {
+        return new ExifParserFileIngestModule();
+    }
+}
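The factory above replaces the old singleton registration: each ingest task can now be handed its own ExifParserFileIngestModule, so per-run state such as filesProcessed and filesToFire stays confined to one task. A sketch of how a host might obtain an instance follows; the host class is hypothetical, it assumes isFileIngestModuleFactory() and createFileIngestModule() are visible on the IngestModuleFactory type (as the @Override annotations suggest), and passing null options assumes, as this factory's createFileIngestModule() does, that the options are unused.

// Sketch only, not part of this patch: per-task module instantiation.
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestModuleFactory;

class PerTaskInstantiationSketch {
    void startTask(IngestModuleFactory factory, long taskId) throws IngestModuleFactory.InvalidOptionsException {
        if (factory.isFileIngestModuleFactory()) {
            // A fresh instance per task instead of a shared getDefault() object.
            FileIngestModule module = factory.createFileIngestModule(null);
            module.init(taskId);
        }
    }
}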
diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java
index 56da384573..3ba5f954df 100644
--- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java
+++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java
@@ -66,7 +66,7 @@ public class HashDbIngestModule implements FileIngestModule {
     }
 
     @Override
-    public void init(long dataSourceTaskId) {
+    public void init(long taskId) {
         services = IngestServices.getDefault();
         skCase = Case.getCurrentCase().getSleuthkitCase();
 
@@ -76,22 +76,20 @@ public class HashDbIngestModule implements FileIngestModule {
         calcHashesIsSet = hashDbManager.getAlwaysCalculateHashes();
 
         if (knownHashSets.isEmpty()) {
-            // RJCTODO
-//            services.postMessage(IngestMessage.createWarningMessage(++messageId,
-//                    this,
-//                    NbBundle.getMessage(this.getClass(),
-//                    "HashDbIngestModule.noKnownHashDbSetMsg"),
-//                    NbBundle.getMessage(this.getClass(),
-//                    "HashDbIngestModule.knownFileSearchWillNotExecuteWarn")));
+            services.postMessage(IngestMessage.createWarningMessage(++messageId,
+                    this,
+                    NbBundle.getMessage(this.getClass(),
+                    "HashDbIngestModule.noKnownHashDbSetMsg"),
+                    NbBundle.getMessage(this.getClass(),
+                    "HashDbIngestModule.knownFileSearchWillNotExecuteWarn")));
         }
 
         if (knownBadHashSets.isEmpty()) {
-            // RJCTODO
-//            services.postMessage(IngestMessage.createWarningMessage(++messageId,
-//                    this,
-//                    NbBundle.getMessage(this.getClass(),
-//                    "HashDbIngestModule.noKnownBadHashDbSetMsg"),
-//                    NbBundle.getMessage(this.getClass(),
-//                    "HashDbIngestModule.knownBadFileSearchWillNotExecuteWarn")));
+            services.postMessage(IngestMessage.createWarningMessage(++messageId,
+                    this,
+                    NbBundle.getMessage(this.getClass(),
+                    "HashDbIngestModule.noKnownBadHashDbSetMsg"),
+                    NbBundle.getMessage(this.getClass(),
+                    "HashDbIngestModule.knownBadFileSearchWillNotExecuteWarn")));
         }
     }
 
@@ -122,7 +120,6 @@ public class HashDbIngestModule implements FileIngestModule {
 
         // bail out if we have no hashes set
         if ((knownHashSets.isEmpty()) && (knownBadHashSets.isEmpty()) && (calcHashesIsSet == false)) {
-//            return ProcessResult.OK;
             return;
         }
 
@@ -136,14 +133,14 @@ public class HashDbIngestModule implements FileIngestModule {
                 calctime += (System.currentTimeMillis() - calcstart);
             } catch (IOException ex) {
                 logger.log(Level.WARNING, "Error calculating hash of file " + name, ex);
-//                services.postMessage(IngestMessage.createErrorMessage(++messageId,
-//                        HashDbIngestModule.this,
-//                        NbBundle.getMessage(this.getClass(),
-//                        "HashDbIngestModule.fileReadErrorMsg",
-//                        name),
-//                        NbBundle.getMessage(this.getClass(),
-//                        "HashDbIngestModule.calcHashValueErr",
-//                        name)));
+                services.postMessage(IngestMessage.createErrorMessage(++messageId,
+                        HashDbIngestModule.this,
+                        NbBundle.getMessage(this.getClass(),
+                        "HashDbIngestModule.fileReadErrorMsg",
+                        name),
+                        NbBundle.getMessage(this.getClass(),
+                        "HashDbIngestModule.calcHashValueErr",
+                        name)));
 //                return ProcessResult.ERROR;
                 return;
             }
@@ -163,14 +160,14 @@ public class HashDbIngestModule implements FileIngestModule {
                     skCase.setKnown(file, TskData.FileKnown.BAD);
                 } catch (TskException ex) {
                     logger.log(Level.WARNING, "Couldn't set known bad state for file " + name + " - see sleuthkit log for details", ex);
-//                    services.postMessage(IngestMessage.createErrorMessage(++messageId,
-//                            HashDbIngestModule.this,
-//                            NbBundle.getMessage(this.getClass(),
-//                            "HashDbIngestModule.hashLookupErrorMsg",
-//                            name),
-//                            NbBundle.getMessage(this.getClass(),
-//                            "HashDbIngestModule.settingKnownBadStateErr",
-//                            name)));
+                    services.postMessage(IngestMessage.createErrorMessage(++messageId,
+                            HashDbIngestModule.this,
+                            NbBundle.getMessage(this.getClass(),
+                            "HashDbIngestModule.hashLookupErrorMsg",
+                            name),
+                            NbBundle.getMessage(this.getClass(),
+                            "HashDbIngestModule.settingKnownBadStateErr",
+                            name)));
 //                    ret = ProcessResult.ERROR;
                 }
                 String hashSetName = db.getHashSetName();
@@ -194,15 +191,16 @@ public class HashDbIngestModule implements FileIngestModule {
                     lookuptime += (System.currentTimeMillis() - lookupstart);
                 } catch (TskException ex) {
                     logger.log(Level.WARNING, "Couldn't lookup known bad hash for file " + name + " - see sleuthkit log for details", ex);
-//                    services.postMessage(IngestMessage.createErrorMessage(++messageId,
-//                            HashDbIngestModule.this,
-//                            NbBundle.getMessage(this.getClass(),
-//                            "HashDbIngestModule.hashLookupErrorMsg",
-//                            name),
-//                            NbBundle.getMessage(this.getClass(),
-//                            "HashDbIngestModule.lookingUpKnownBadHashValueErr",
-//                            name)));
-//                    ret = ProcessResult.ERROR;
+                    services.postMessage(IngestMessage.createErrorMessage(++messageId,
+                            HashDbIngestModule.this,
+                            NbBundle.getMessage(this.getClass(),
+                            "HashDbIngestModule.hashLookupErrorMsg",
+                            name),
+                            NbBundle.getMessage(this.getClass(),
+                            "HashDbIngestModule.lookingUpKnownBadHashValueErr",
+                            name)));
+// RJCTODO
+                    // ret = ProcessResult.ERROR;
                 }
             }
 
@@ -219,29 +217,23 @@ public class HashDbIngestModule implements FileIngestModule {
                         break;
                     } catch (TskException ex) {
                         logger.log(Level.WARNING, "Couldn't set known state for file " + name + " - see sleuthkit log for details", ex);
-//                        services.postMessage(IngestMessage.createErrorMessage(++messageId,
-//                                HashDbIngestModule.this,
-//                                NbBundle.getMessage(this.getClass(),
-//                                "HashDbIngestModule.hashLookupErrorMsg",
-//                                name),
-//                                NbBundle.getMessage(this.getClass(),
-//                                "HashDbIngestModule.settingsKnownStateErr",
-//                                name)));
-//                        ret = ProcessResult.ERROR;
+                        // RJCTODO
+                        // ret = ProcessResult.ERROR;
                     }
                 }
                 lookuptime += (System.currentTimeMillis() - lookupstart);
             } catch (TskException ex) {
                 logger.log(Level.WARNING, "Couldn't lookup known hash for file " + name + " - see sleuthkit log for details", ex);
-//
services.postMessage(IngestMessage.createErrorMessage(++messageId, -// HashDbIngestModule.this, -// NbBundle.getMessage(this.getClass(), -// "HashDbIngestModule.hashLookupErrorMsg", -// name), -// NbBundle.getMessage(this.getClass(), -// "HashDbIngestModule.lookingUpKnownHashValueErr", -// name))); -// ret = ProcessResult.ERROR; + services.postMessage(IngestMessage.createErrorMessage(++messageId, + HashDbIngestModule.this, + NbBundle.getMessage(this.getClass(), + "HashDbIngestModule.hashLookupErrorMsg", + name), + NbBundle.getMessage(this.getClass(), + "HashDbIngestModule.lookingUpKnownHashValueErr", + name))); + // RJCTODO + // ret = ProcessResult.ERROR; } } } @@ -294,13 +286,13 @@ public class HashDbIngestModule implements FileIngestModule { detailsSb.append(""); -// services.postMessage(IngestMessage.createDataMessage(++messageId, this, -// NbBundle.getMessage(this.getClass(), -// "HashDbIngestModule.postToBB.knownBadMsg", -// abstractFile.getName()), -// detailsSb.toString(), -// abstractFile.getName() + md5Hash, -// badFile)); + services.postMessage(IngestMessage.createDataMessage(++messageId, this, + NbBundle.getMessage(this.getClass(), + "HashDbIngestModule.postToBB.knownBadMsg", + abstractFile.getName()), + detailsSb.toString(), + abstractFile.getName() + md5Hash, + badFile)); } services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, ARTIFACT_TYPE.TSK_HASHSET_HIT, Collections.singletonList(badFile))); } catch (TskException ex) { @@ -337,12 +329,12 @@ public class HashDbIngestModule implements FileIngestModule { } detailsSb.append(""); -// services.postMessage(IngestMessage.createMessage(++messageId, -// IngestMessage.MessageType.INFO, -// this, -// NbBundle.getMessage(this.getClass(), -// "HashDbIngestModule.complete.hashLookupResults"), -// detailsSb.toString())); + services.postMessage(IngestMessage.createMessage(++messageId, + IngestMessage.MessageType.INFO, + this, + NbBundle.getMessage(this.getClass(), + "HashDbIngestModule.complete.hashLookupResults"), + detailsSb.toString())); } } diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java index fc4679ae0e..0ffb87527c 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java @@ -50,10 +50,10 @@ import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; import org.sleuthkit.autopsy.coreutils.StopWatch; import org.sleuthkit.autopsy.coreutils.StringExtract.StringExtractUnicodeTable.SCRIPT; import org.sleuthkit.autopsy.coreutils.Version; +import org.sleuthkit.autopsy.ingest.FileIngestModule; import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.IngestMessage; import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType; -import org.sleuthkit.autopsy.ingest.IngestModuleInit; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.autopsy.keywordsearch.Ingester.IngesterException; import org.sleuthkit.datamodel.BlackboardArtifact; @@ -77,7 +77,7 @@ import org.sleuthkit.datamodel.TskData.FileKnown; * * Registered as a module in layer.xml */ -public final class KeywordSearchIngestModule { +public final class KeywordSearchIngestModule implements FileIngestModule { enum UpdateFrequency { @@ -102,7 +102,6 @@ public final class KeywordSearchIngestModule { public static final String MODULE_DESCRIPTION = 
NbBundle.getMessage(KeywordSearchIngestModule.class, "KeywordSearchIngestModule.moduleDescription");
     final public static String MODULE_VERSION = Version.getVersion();
-    private static KeywordSearchIngestModule instance = null;
     private IngestServices services;
     private Ingester ingester = null;
     private volatile boolean commitIndex = false; //whether to commit index next time
@@ -146,28 +145,110 @@ public final class KeywordSearchIngestModule {
     };
     private Map<Long, IngestStatus> ingestStatus;
 
-    //private constructor to ensure singleton instance
-    private KeywordSearchIngestModule() {
+    KeywordSearchIngestModule() {
     }
 
     /**
-     * Returns singleton instance of the module, creates one if needed
+     * Initializes the module for new ingest run Sets up threads, timers,
+     * retrieves settings, keyword lists to run on
      *
-     * @return instance of the module
      */
-    public static synchronized KeywordSearchIngestModule getDefault() {
-        if (instance == null) {
-            instance = new KeywordSearchIngestModule();
-        }
-        return instance;
-    }
+    @Override
+    public void init(long taskId) {
+        logger.log(Level.INFO, "init()");
+        services = IngestServices.getDefault();
+        initialized = false;
+        caseHandle = Case.getCurrentCase().getSleuthkitCase();
+
+        tikaFormatDetector = new Tika();
+
+        ingester = Server.getIngester();
+
+        final Server server = KeywordSearch.getServer();
+        try {
+            if (!server.isRunning()) {
+                String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg");
+                logger.log(Level.SEVERE, msg);
+                String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg);
+                services.postMessage(IngestMessage.createErrorMessage(++messageID, this, msg, details));
+                return;
+
+            }
+        } catch (KeywordSearchModuleException ex) {
+            logger.log(Level.WARNING, "Error checking if Solr server is running while initializing ingest", ex);
+            //this means Solr is not properly initialized
+            String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg");
+            String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg);
+            services.postMessage(IngestMessage.createErrorMessage(++messageID, this, msg, details));
+            return;
+        }
+
+
+        //initialize extractors
+        stringExtractor = new AbstractFileStringExtract();
+        stringExtractor.setScripts(KeywordSearchSettings.getStringExtractScripts());
+        stringExtractor.setOptions(KeywordSearchSettings.getStringExtractOptions());
+
+
+        //log the scripts used for debugging
+        final StringBuilder sbScripts = new StringBuilder();
+        for (SCRIPT s : KeywordSearchSettings.getStringExtractScripts()) {
+            sbScripts.append(s.name()).append(" ");
+        }
+        logger.log(Level.INFO, "Using string extract scripts: {0}", sbScripts.toString());
+
+        textExtractors = new ArrayList<>();
+        //order matters, more specific extractors first
+        textExtractors.add(new AbstractFileHtmlExtract());
+        textExtractors.add(new AbstractFileTikaTextExtract());
+
+
+        ingestStatus = new HashMap<>();
+
+        keywords = new ArrayList<>();
+        keywordLists = new ArrayList<>();
+        keywordToList = new HashMap<>();
+
+        initKeywords();
+
+        if (keywords.isEmpty() || keywordLists.isEmpty()) {
+            services.postMessage(IngestMessage.createWarningMessage(++messageID, this, NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.noKwInLstMsg"),
+                    NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.onlyIdxKwSkipMsg")));
+        }
+
+        processedFiles = false;
+        finalSearcherDone = false;
+        searcherDone = true; //make sure to
start the initial currentSearcher + //keeps track of all results per run not to repeat reporting the same hits + currentResults = new HashMap<>(); + + curDataSourceIds = new HashSet<>(); + + indexer = new Indexer(); + + final int updateIntervalMs = KeywordSearchSettings.getUpdateFrequency().getTime() * 60 * 1000; + logger.log(Level.INFO, "Using commit interval (ms): {0}", updateIntervalMs); + logger.log(Level.INFO, "Using searcher interval (ms): {0}", updateIntervalMs); + + commitTimer = new Timer(updateIntervalMs, new CommitTimerAction()); + searchTimer = new Timer(updateIntervalMs, new SearchTimerAction()); + + initialized = true; + + commitTimer.start(); + searchTimer.start(); + } + + @Override public void process(AbstractFile abstractFile) { if (initialized == false) //error initializing indexing/Solr { - logger.log(Level.WARNING, "Skipping processing, module not initialized, file: " + abstractFile.getName()); + logger.log(Level.WARNING, "Skipping processing, module not initialized, file: {0}", abstractFile.getName()); ingestStatus.put(abstractFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); + return; + // RJCTODO // return ProcessResult.OK; } try { @@ -181,9 +262,12 @@ public final class KeywordSearchIngestModule { if (abstractFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR)) { //skip indexing of virtual dirs (no content, no real name) - will index children files + return; + // RJCTODO // return ProcessResult.OK; } + // RJCTODO //check if we should index meta-data only when 1) it is known 2) HashDb module errored on it // if (services.getAbstractFileModuleResult(hashDBModuleName) == IngestModuleAbstractFile.ProcessResult.ERROR) { // indexer.indexFile(abstractFile, false); @@ -195,7 +279,8 @@ public final class KeywordSearchIngestModule { if (KeywordSearchSettings.getSkipKnown() && abstractFile.getKnown().equals(FileKnown.KNOWN)) { //index meta-data only indexer.indexFile(abstractFile, false); -// return ProcessResult.OK; + // RJCTODO + // return ProcessResult.OK; } processedFiles = true; @@ -206,6 +291,7 @@ public final class KeywordSearchIngestModule { //index the file and content (if the content is supported) indexer.indexFile(abstractFile, true); + // RJCTODO // return ProcessResult.OK; } @@ -213,6 +299,7 @@ public final class KeywordSearchIngestModule { * After all files are ingested, execute final index commit and final search * Cleanup resources, threads, timers */ + @Override public void complete() { if (initialized == false) { return; @@ -249,12 +336,10 @@ public final class KeywordSearchIngestModule { try { final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles(); final int numIndexedChunks = KeywordSearch.getServer().queryNumIndexedChunks(); - logger.log(Level.INFO, "Indexed files count: " + numIndexedFiles); - logger.log(Level.INFO, "Indexed file chunks count: " + numIndexedChunks); - } catch (NoOpenCoreException ex) { + logger.log(Level.INFO, "Indexed files count: {0}", numIndexedFiles); + logger.log(Level.INFO, "Indexed file chunks count: {0}", numIndexedChunks); + } catch (NoOpenCoreException | KeywordSearchModuleException ex) { logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files/chunks: ", ex); - } catch (KeywordSearchModuleException se) { - logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files/chunks: ", se); } //cleanup done in final searcher @@ -265,6 +350,7 @@ public final class KeywordSearchIngestModule { /** * Handle stop event (ingest interrupted) Cleanup 
resources, threads, timers */ + @Override public void stop() { logger.log(Level.INFO, "stop()"); @@ -319,152 +405,20 @@ public final class KeywordSearchIngestModule { initialized = false; } - public String getName() { - return MODULE_NAME; - } - - public String getDescription() { - return MODULE_DESCRIPTION; - } - - public String getVersion() { - return MODULE_VERSION; - } - - /** - * Initializes the module for new ingest run Sets up threads, timers, - * retrieves settings, keyword lists to run on - * - */ - public void init(IngestModuleInit initContext) { - logger.log(Level.INFO, "init()"); - services = IngestServices.getDefault(); - initialized = false; - - caseHandle = Case.getCurrentCase().getSleuthkitCase(); - - tikaFormatDetector = new Tika(); - - ingester = Server.getIngester(); - - final Server server = KeywordSearch.getServer(); - try { - if (!server.isRunning()) { - String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg"); - logger.log(Level.SEVERE, msg); - String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg); -// services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details)); - return; - - } - } catch (KeywordSearchModuleException ex) { - logger.log(Level.WARNING, "Error checking if Solr server is running while initializing ingest", ex); - //this means Solr is not properly initialized - String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg"); - String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg); -// services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details)); - return; - } - - - //initialize extractors - stringExtractor = new AbstractFileStringExtract(); - stringExtractor.setScripts(KeywordSearchSettings.getStringExtractScripts()); - stringExtractor.setOptions(KeywordSearchSettings.getStringExtractOptions()); - - - //log the scripts used for debugging - final StringBuilder sbScripts = new StringBuilder(); - for (SCRIPT s : KeywordSearchSettings.getStringExtractScripts()) { - sbScripts.append(s.name()).append(" "); - } - logger.log(Level.INFO, "Using string extract scripts: " + sbScripts.toString()); - - textExtractors = new ArrayList(); - //order matters, more specific extractors first - textExtractors.add(new AbstractFileHtmlExtract()); - textExtractors.add(new AbstractFileTikaTextExtract()); - - - ingestStatus = new HashMap(); - - keywords = new ArrayList(); - keywordLists = new ArrayList(); - keywordToList = new HashMap(); - - initKeywords(); - - if (keywords.isEmpty() || keywordLists.isEmpty()) { -// services.postMessage(IngestMessage.createWarningMessage(++messageID, instance, NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.noKwInLstMsg"), -// NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.onlyIdxKwSkipMsg"))); - } - - processedFiles = false; - finalSearcherDone = false; - searcherDone = true; //make sure to start the initial currentSearcher - //keeps track of all results per run not to repeat reporting the same hits - currentResults = new HashMap>(); - - curDataSourceIds = new HashSet(); - - indexer = new Indexer(); - - final int updateIntervalMs = KeywordSearchSettings.getUpdateFrequency().getTime() * 60 * 1000; - logger.log(Level.INFO, "Using commit interval (ms): " + updateIntervalMs); - logger.log(Level.INFO, "Using searcher interval (ms): " + updateIntervalMs); - - commitTimer = 
new Timer(updateIntervalMs, new CommitTimerAction()); - searchTimer = new Timer(updateIntervalMs, new SearchTimerAction()); - - initialized = true; - - commitTimer.start(); - searchTimer.start(); - } - - public boolean hasSimpleConfiguration() { - return true; - } - - public boolean hasAdvancedConfiguration() { - return true; - } - - public javax.swing.JPanel getSimpleConfiguration(String context) { - KeywordSearchListsXML.getCurrent().reload(); - - if (null == simpleConfigPanel) { - simpleConfigPanel = new KeywordSearchIngestSimplePanel(); - } - else { - simpleConfigPanel.load(); - } - - return simpleConfigPanel; - } - - public javax.swing.JPanel getAdvancedConfiguration(String context) { - if (advancedConfigPanel == null) { - advancedConfigPanel = new KeywordSearchConfigurationPanel(); - } - - advancedConfigPanel.load(); - return advancedConfigPanel; - } - - public void saveAdvancedConfiguration() { - if (advancedConfigPanel != null) { - advancedConfigPanel.store(); - } - - if (simpleConfigPanel != null) { - simpleConfigPanel.load(); - } - } - - public void saveSimpleConfiguration() { - KeywordSearchListsXML.getCurrent().save(); - } + // RJCTODO +// public void saveAdvancedConfiguration() { +// if (advancedConfigPanel != null) { +// advancedConfigPanel.store(); +// } +// +// if (simpleConfigPanel != null) { +// simpleConfigPanel.load(); +// } +// } +// +// public void saveSimpleConfiguration() { +// KeywordSearchListsXML.getCurrent().save(); +// } /** * The modules maintains background threads, return true if background @@ -473,15 +427,15 @@ public final class KeywordSearchIngestModule { * * @return */ - public boolean hasBackgroundJobsRunning() { - if ((currentSearcher != null && searcherDone == false) - || (finalSearcherDone == false)) { - return true; - } else { - return false; - } - - } + // RJCTODO: +// public boolean hasBackgroundJobsRunning() { +// if ((currentSearcher != null && searcherDone == false) +// || (finalSearcherDone == false)) { +// return true; +// } else { +// return false; +// } +// } /** * Commits index and notifies listeners of index update @@ -540,7 +494,7 @@ public final class KeywordSearchIngestModule { msg.append("").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.errIoLbl")).append("").append(error_io).append(""); msg.append(""); String indexStats = msg.toString(); - logger.log(Level.INFO, "Keyword Indexing Completed: " + indexStats); + logger.log(Level.INFO, "Keyword Indexing Completed: {0}", indexStats); // RJCTODO // services.postMessage(IngestMessage.createMessage(++messageID, MessageType.INFO, this, NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.kwIdxResultsLbl"), indexStats)); if (error_index > 0) { @@ -561,10 +515,8 @@ public final class KeywordSearchIngestModule { try { final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles(); KeywordSearch.fireNumIndexedFilesChange(null, new Integer(numIndexedFiles)); - } catch (NoOpenCoreException ex) { + } catch (NoOpenCoreException | KeywordSearchModuleException ex) { logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files: ", ex); - } catch (KeywordSearchModuleException se) { - logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files: ", se); } } @@ -611,7 +563,7 @@ public final class KeywordSearchIngestModule { } - logger.log(Level.INFO, "Set new effective keyword lists: " + sb.toString()); + logger.log(Level.INFO, "Set new effective keyword lists: {0}", 
sb.toString()); } @@ -703,8 +655,7 @@ public final class KeywordSearchIngestModule { } if (fileExtract == null) { - logger.log(Level.INFO, "No text extractor found for file id:" - + aFile.getId() + ", name: " + aFile.getName() + ", detected format: " + detectedFormat); + logger.log(Level.INFO, "No text extractor found for file id:{0}, name: {1}, detected format: {2}", new Object[]{aFile.getId(), aFile.getName(), detectedFormat}); return false; } @@ -727,7 +678,7 @@ public final class KeywordSearchIngestModule { ingestStatus.put(aFile.getId(), IngestStatus.STRINGS_INGESTED); return true; } else { - logger.log(Level.WARNING, "Failed to extract strings and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ")."); + logger.log(Level.WARNING, "Failed to extract strings and ingest, file ''{0}'' (id: {1}).", new Object[]{aFile.getName(), aFile.getId()}); ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT); return false; } @@ -833,7 +784,7 @@ public final class KeywordSearchIngestModule { try { //logger.log(Level.INFO, "indexing: " + aFile.getName()); if (!extractTextAndIndex(aFile, detectedFormat)) { - logger.log(Level.WARNING, "Failed to extract text and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ")."); + logger.log(Level.WARNING, "Failed to extract text and ingest, file ''{0}'' (id: {1}).", new Object[]{aFile.getName(), aFile.getId()}); ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT); } else { ingestStatus.put(aFile.getId(), IngestStatus.TEXT_INGESTED); @@ -877,9 +828,9 @@ public final class KeywordSearchIngestModule { private boolean finalRun = false; Searcher(List keywordLists) { - this.keywordLists = new ArrayList(keywordLists); - this.keywords = new ArrayList(); - this.keywordToList = new HashMap(); + this.keywordLists = new ArrayList<>(keywordLists); + this.keywords = new ArrayList<>(); + this.keywordToList = new HashMap<>(); //keywords are populated as searcher runs } @@ -944,7 +895,7 @@ public final class KeywordSearchIngestModule { for (Keyword keywordQuery : keywords) { if (this.isCancelled()) { - logger.log(Level.INFO, "Cancel detected, bailing before new keyword processed: " + keywordQuery.getQuery()); + logger.log(Level.INFO, "Cancel detected, bailing before new keyword processed: {0}", keywordQuery.getQuery()); return null; } @@ -975,7 +926,7 @@ public final class KeywordSearchIngestModule { final KeywordQueryFilter dataSourceFilter = new KeywordQueryFilter(KeywordQueryFilter.FilterType.DATA_SOURCE, curDataSourceIds); del.addFilter(dataSourceFilter); - Map> queryResult = null; + Map> queryResult; try { queryResult = del.performQuery(); @@ -986,7 +937,7 @@ public final class KeywordSearchIngestModule { //likely case has closed and threads are being interrupted return null; } catch (CancellationException e) { - logger.log(Level.INFO, "Cancel detected, bailing during keyword query: " + keywordQuery.getQuery()); + logger.log(Level.INFO, "Cancel detected, bailing during keyword query: {0}", keywordQuery.getQuery()); return null; } catch (Exception e) { logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), e); @@ -1002,7 +953,7 @@ public final class KeywordSearchIngestModule { //write results to BB //new artifacts created, to report to listeners - Collection newArtifacts = new ArrayList(); + Collection newArtifacts = new ArrayList<>(); //scale progress bar more more granular, per result sub-progress, within per keyword int totalUnits = newResults.size(); @@ -1019,7 +970,7 @@ public 
final class KeywordSearchIngestModule {
             for (final Keyword hitTerm : newResults.keySet()) {
                 //checking for cancellation between results
                 if (this.isCancelled()) {
-                    logger.log(Level.INFO, "Cancel detected, bailing before new hit processed for query: " + keywordQuery.getQuery());
+                    logger.log(Level.INFO, "Cancel detected, bailing before new hit processed for query: {0}", keywordQuery.getQuery());
                     return null;
                 }
@@ -1036,7 +987,7 @@ public final class KeywordSearchIngestModule {
                 for (final AbstractFile hitFile : contentHitsFlattened.keySet()) {
 
                     // get the snippet for the first hit in the file
-                    String snippet = null;
+                    String snippet;
                     final String snippetQuery = KeywordSearchUtil.escapeLuceneQuery(hitTerm.getQuery());
                     int chunkId = contentHitsFlattened.get(hitFile);
                     try {
@@ -1053,7 +1004,7 @@ public final class KeywordSearchIngestModule {
                     // write the blackboard artifact for this keyword in this file
                     KeywordWriteResult written = del.writeToBlackBoard(hitTerm.getQuery(), hitFile, snippet, listName);
                     if (written == null) {
-                        logger.log(Level.WARNING, "BB artifact for keyword hit not written, file: " + hitFile + ", hit: " + hitTerm.toString());
+                        logger.log(Level.WARNING, "BB artifact for keyword hit not written, file: {0}, hit: {1}", new Object[]{hitFile, hitTerm.toString()});
                         continue;
                     }
@@ -1128,7 +1079,7 @@ public final class KeywordSearchIngestModule {
                     }
                     detailsSb.append("");
 
-//                    services.postMessage(IngestMessage.createDataMessage(++messageID, instance, subjectSb.toString(), detailsSb.toString(), uniqueKey, written.getArtifact()));
+                    services.postMessage(IngestMessage.createDataMessage(++messageID, KeywordSearchIngestModule.this, subjectSb.toString(), detailsSb.toString(), uniqueKey, written.getArtifact()));
                 } //for each file hit
@@ -1156,7 +1107,7 @@ public final class KeywordSearchIngestModule {
                 try {
                     finalizeSearcher();
                     stopWatch.stop();
-                    logger.log(Level.INFO, "Searcher took to run: " + stopWatch.getElapsedTimeSecs() + " secs.");
+                    logger.log(Level.INFO, "Searcher took to run: {0} secs.", stopWatch.getElapsedTimeSecs());
                 } finally {
                     searcherLock.unlock();
                 }
@@ -1226,13 +1177,13 @@ public final class KeywordSearchIngestModule {
         //calculate new results but substracting results already obtained in this ingest
         //update currentResults map with the new results
         private Map<Keyword, List<ContentHit>> filterResults(Map<String, List<ContentHit>> queryResult, boolean isRegex) {
-            Map<Keyword, List<ContentHit>> newResults = new HashMap<Keyword, List<ContentHit>>();
+            Map<Keyword, List<ContentHit>> newResults = new HashMap<>();
 
             for (String termResult : queryResult.keySet()) {
                 List<ContentHit> queryTermResults = queryResult.get(termResult);
 
                 //translate to list of IDs that we keep track of
-                List<Long> queryTermResultsIDs = new ArrayList<Long>();
+                List<Long> queryTermResultsIDs = new ArrayList<>();
                 for (ContentHit ch : queryTermResults) {
                     queryTermResultsIDs.add(ch.getId());
                 }
@@ -1249,7 +1200,7 @@ public final class KeywordSearchIngestModule {
                 //add to new results
                 List<ContentHit> newResultsFs = newResults.get(termResultK);
                 if (newResultsFs == null) {
-                    newResultsFs = new ArrayList<ContentHit>();
+                    newResultsFs = new ArrayList<>();
                     newResults.put(termResultK, newResultsFs);
                 }
                 newResultsFs.add(res);
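The filterResults() helper above embodies the searcher's bookkeeping: every periodic pass re-runs each keyword query against the growing index, and hits already reported during this ingest run (tracked in currentResults) are subtracted so only new files surface. The idea in isolation, with simplified stand-in types for Keyword and ContentHit; all names here are hypothetical.

// Sketch only, not part of this patch: subtract already-reported hit IDs
// so repeated searches over a growing index report each file once.
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

class ResultDiffSketch {
    // object IDs already reported in this run, per keyword
    private final Map<String, Set<Long>> reported = new HashMap<>();

    List<Long> filterNew(String keyword, List<Long> queryHits) {
        Set<Long> seen = reported.get(keyword);
        if (seen == null) {
            seen = new HashSet<>();
            reported.put(keyword, seen);
        }
        List<Long> fresh = new ArrayList<>();
        for (Long id : queryHits) {
            if (seen.add(id)) {    // add() returns false for IDs seen before
                fresh.add(id);
            }
        }
        return fresh;
    }
}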
diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchModuleFactory.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchModuleFactory.java
new file mode 100755
index 0000000000..1e3c559082
--- /dev/null
+++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchModuleFactory.java
@@ -0,0 +1,121 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2014 Basis Technology Corp.
+ * Contact: carrier sleuthkit org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.sleuthkit.autopsy.keywordsearch;
+
+import java.io.Serializable;
+import javax.swing.JPanel;
+import org.openide.util.NbBundle;
+import org.openide.util.lookup.ServiceProvider;
+import org.sleuthkit.autopsy.coreutils.Version;
+import org.sleuthkit.autopsy.ingest.AbstractIngestModuleFactory;
+import org.sleuthkit.autopsy.ingest.FileIngestModule;
+import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
+
+/**
+ * A factory that creates file ingest modules that do keyword searches.
+ */
+@ServiceProvider(service=IngestModuleFactory.class)
+public class KeywordSearchModuleFactory extends AbstractIngestModuleFactory {
+    @Override
+    public String getModuleDisplayName() {
+        return getModuleName();
+    }
+
+    static String getModuleName() {
+        return NbBundle.getMessage(KeywordSearchIngestModule.class, "KeywordSearchIngestModule.moduleName");
+    }
+
+    @Override
+    public String getModuleDescription() {
+        return NbBundle.getMessage(KeywordSearchIngestModule.class, "KeywordSearchIngestModule.moduleDescription");
+    }
+
+    @Override
+    public String getModuleVersionNumber() {
+        return Version.getVersion();
+    }
+
+    @Override
+    public Serializable getDefaultIngestOptions() {
+        return new IngestOptions();
+    }
+
+    @Override
+    public boolean providesIngestOptionsPanels() {
+        return true;
+    }
+
+    @Override
+    public JPanel getIngestOptionsPanel(Serializable ingestOptions) {
+        KeywordSearchListsXML.getCurrent().reload();
+        return new KeywordSearchIngestSimplePanel(); // RJCTODO: Load required?
+    }
+
+    @Override
+    public Serializable getIngestOptionsFromPanel(JPanel ingestOptionsPanel) throws IngestModuleFactory.InvalidOptionsException {
+        if (!(ingestOptionsPanel instanceof KeywordSearchIngestSimplePanel)) {
+            throw new IngestModuleFactory.InvalidOptionsException(""); // RJCTODO
+        }
+
+        KeywordSearchIngestSimplePanel panel = (KeywordSearchIngestSimplePanel)ingestOptionsPanel;
+        panel.store();
+
+        return new IngestOptions(); // RJCTODO
+    }
+
+    @Override
+    public boolean providesGlobalOptionsPanels() {
+        return true;
+    }
+
+    @Override
+    public JPanel getGlobalOptionsPanel() {
+        KeywordSearchConfigurationPanel globalOptionsPanel = new KeywordSearchConfigurationPanel();
+        globalOptionsPanel.load();
+        return globalOptionsPanel;
+    }
+
+    @Override
+    public void saveGlobalOptionsFromPanel(JPanel globalOptionsPanel) throws IngestModuleFactory.InvalidOptionsException {
+        if (!(globalOptionsPanel instanceof KeywordSearchConfigurationPanel)) {
+            throw new IngestModuleFactory.InvalidOptionsException(""); // RJCTODO
+        }
+
+        KeywordSearchConfigurationPanel panel = (KeywordSearchConfigurationPanel)globalOptionsPanel;
+        panel.store();
+        // RJCTODO: Need simple panel store? May need to change implementation...see also hash db factory
+    }
+
+    @Override
+    public boolean isFileIngestModuleFactory() {
+        return true;
+    }
+
+    @Override
+    public FileIngestModule createFileIngestModule(Serializable ingestOptions) throws IngestModuleFactory.InvalidOptionsException {
+        return new KeywordSearchIngestModule();
+    }
+
+    private static class IngestOptions implements Serializable {
+        // RJCTODO: Any options here?
+        // boolean alwaysCalcHashes = true;
+//        ArrayList<String> hashSetNames = new ArrayList<>();
+    }
+}
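The factory above also takes over the option UI that used to hang off the module itself (the getSimpleConfiguration/getAdvancedConfiguration methods deleted earlier in this patch). A hypothetical host dialog would round-trip options through the panel methods like this; only methods implemented in this patch are called, assuming they are declared on the IngestModuleFactory type as the @Override annotations suggest.

// Sketch only, not part of this patch: options-panel round trip.
import java.io.Serializable;
import javax.swing.JPanel;
import org.sleuthkit.autopsy.ingest.IngestModuleFactory;

class OptionsPanelSketch {
    Serializable configure(IngestModuleFactory factory) throws IngestModuleFactory.InvalidOptionsException {
        Serializable options = factory.getDefaultIngestOptions();
        if (factory.providesIngestOptionsPanels()) {
            JPanel panel = factory.getIngestOptionsPanel(options);
            // ... host shows the panel; the user edits settings ...
            options = factory.getIngestOptionsFromPanel(panel);
        }
        return options; // later passed to createFileIngestModule(options)
    }
}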
diff --git a/ewfVerify/nbproject/project.xml b/ewfVerify/nbproject/project.xml
index a3955c75fa..1c9b1dd905 100755
--- a/ewfVerify/nbproject/project.xml
+++ b/ewfVerify/nbproject/project.xml
@@ -6,6 +6,14 @@
             <code-name-base>org.sleuthkit.autopsy.ewfverify</code-name-base>
             <suite-component/>
             <module-dependencies>
+                <dependency>
+                    <code-name-base>org.openide.util.lookup</code-name-base>
+                    <build-prerequisite/>
+                    <compile-dependency/>
+                    <run-dependency>
+                        <specification-version>8.19.1</specification-version>
+                    </run-dependency>
+                </dependency>
                 <dependency>
                     <code-name-base>org.sleuthkit.autopsy.core</code-name-base>
diff --git a/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifierModuleFactory.java b/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifierModuleFactory.java
new file mode 100755
index 0000000000..8087d249b0
--- /dev/null
+++ b/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifierModuleFactory.java
@@ -0,0 +1,63 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2014 Basis Technology Corp.
+ * Contact: carrier sleuthkit org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.sleuthkit.autopsy.ewfverify;
+
+import java.io.Serializable;
+import org.openide.util.lookup.ServiceProvider;
+import org.sleuthkit.autopsy.coreutils.Version;
+import org.sleuthkit.autopsy.ingest.AbstractIngestModuleFactory;
+import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
+import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
+
+/**
+ * A factory that creates data source ingest modules that verify EWF (E01) image files.
+ */
+@ServiceProvider(service=IngestModuleFactory.class)
+public class EwfVerifierModuleFactory extends AbstractIngestModuleFactory {
+    @Override
+    public String getModuleDisplayName() {
+        return getModuleName();
+    }
+
+    static String getModuleName() {
+        return "EWF Verify"; // RJCTODO: Is this what we want here?
+    }
+
+    @Override
+    public String getModuleDescription() {
+        return "Validates the integrity of E01 files.";
+    }
+
+    @Override
+    public String getModuleVersionNumber() {
+        return Version.getVersion();
+    }
+
+
+    @Override
+    public boolean isDataSourceIngestModuleFactory() {
+        return true;
+    }
+
+    @Override
+    public DataSourceIngestModule createDataSourceIngestModule(Serializable ingestOptions) throws IngestModuleFactory.InvalidOptionsException {
+        return new EwfVerifyIngestModule();
+    }
+}
diff --git a/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java b/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java
index b006eb386e..3140135375 100755
--- a/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java
+++ b/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java
@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2013 Basis Technology Corp.
+ * Copyright 2013-2014 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,204 +16,180 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-//package org.sleuthkit.autopsy.ewfverify;
-//
-//import java.security.MessageDigest;
-//import java.security.NoSuchAlgorithmException;
-//import java.util.logging.Level;
-//import java.util.logging.Logger;
-//import javax.xml.bind.DatatypeConverter;
-//import org.sleuthkit.autopsy.casemodule.Case;
-//import org.sleuthkit.autopsy.coreutils.Version;
-//import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController;
-//import org.sleuthkit.autopsy.ingest.IngestMessage;
-//import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType;
-//import org.sleuthkit.autopsy.ingest.IngestModuleDataSource;
-//import org.sleuthkit.autopsy.ingest.IngestModuleInit;
-//import org.sleuthkit.autopsy.ingest.IngestServices;
-//import org.sleuthkit.datamodel.Content;
-//import org.sleuthkit.datamodel.Image;
-//import org.sleuthkit.datamodel.SleuthkitCase;
-//import org.sleuthkit.datamodel.TskCoreException;
-//import org.sleuthkit.datamodel.TskData;
-///**
-// * Data Source Ingest Module that generates a hash of an E01 image file and
-// * verifies it with the value stored in the image.
-// *
-// * @author jwallace
-// */
-//public class EwfVerifyIngestModule extends IngestModuleDataSource {
-//    private static final String MODULE_NAME = "EWF Verify";
-//    private static final String MODULE_VERSION = Version.getVersion();
-//    private static final String MODULE_DESCRIPTION = "Validates the integrity of E01 files.";
-//    private static final long DEFAULT_CHUNK_SIZE = 32 * 1024;
-//    private IngestServices services;
-//    private volatile boolean running = false;
-//    private Image img;
-//    private String imgName;
-//    private MessageDigest messageDigest;
-//    private static Logger logger = null;
-//    private static int messageId = 0;
-//    private boolean verified = false;
-//    private boolean skipped = false;
-//    private String calculatedHash = "";
-//    private String storedHash = "";
-//    private SleuthkitCase skCase;
-//
-//    public EwfVerifyIngestModule() {
-//    }
-//
-//    @Override
-//    public void process(PipelineContext<IngestModuleDataSource> pipelineContext, Content dataSource, IngestDataSourceWorkerController controller) {
-//        imgName = dataSource.getName();
-//        try {
-//            img = dataSource.getImage();
-//        } catch (TskCoreException ex) {
-//            img = null;
-//            logger.log(Level.SEVERE, "Failed to get image from Content.", ex);
-//            services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this,
-//                    "Error processing " + imgName));
-//            return;
-//        }
-//
-//        // Skip images that are not E01
-//        if (img.getType() != TskData.TSK_IMG_TYPE_ENUM.TSK_IMG_TYPE_EWF_EWF) {
-//            img = null;
-//            logger.log(Level.INFO, "Skipping non-ewf image " + imgName);
-//            services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this,
-//                    "Skipping non-ewf image " + imgName));
-//            skipped = true;
-//            return;
-//        }
-//
-//
-//        if ((img.getMd5()!= null) && !img.getMd5().isEmpty())
-//        {
-//            storedHash = img.getMd5().toLowerCase();
-//            logger.info("Hash value stored in " + imgName + ": " + storedHash);
-//
-//        }
-//        else {
-//            services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this,
-//                    "Image " + imgName + " does not have stored hash."));
-//            return;
-//        }
-//
-//        logger.log(Level.INFO, "Starting ewf verification of " + img.getName());
-//        services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this,
-//                "Starting " + imgName));
-//
-//        long size = img.getSize();
-//        if (size == 0) {
-//            logger.log(Level.WARNING, "Size of image " + imgName + " was 0 when queried.");
-//            services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this,
-//                    "Error getting size of " + imgName + ". Image will not be processed."));
-//        }
-//
-//        // Libewf uses a sector size of 64 times the sector size, which is the
-//        // motivation for using it here.
-//        long chunkSize = 64 * img.getSsize();
-//        chunkSize = (chunkSize == 0) ? DEFAULT_CHUNK_SIZE : chunkSize;
-//
-//        int totalChunks = (int) Math.ceil(size / chunkSize);
-//        logger.log(Level.INFO, "Total chunks = " + totalChunks);
-//        int read;
-//
-//        byte[] data;
-//        controller.switchToDeterminate(totalChunks);
-//
-//        running = true;
-//        // Read in byte size chunks and update the hash value with the data.
-//        for (int i = 0; i < totalChunks; i++) {
-//            if (controller.isCancelled()) {
-//                running = false;
-//                return;
-//            }
-//            data = new byte[ (int) chunkSize ];
-//            try {
-//                read = img.read(data, i * chunkSize, chunkSize);
-//            } catch (TskCoreException ex) {
-//                String msg = "Error reading " + imgName + " at chunk " + i;
-//                services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, msg));
-//                logger.log(Level.SEVERE, msg, ex);
-//                return;
-//            }
-//            messageDigest.update(data);
-//            controller.progress(i);
-//        }
-//
-//        // Finish generating the hash and get it as a string value
-//        calculatedHash = DatatypeConverter.printHexBinary(messageDigest.digest()).toLowerCase();
-//        verified = calculatedHash.equals(storedHash);
-//        logger.info("Hash calculated from " + imgName + ": " + calculatedHash);
-//        running = false;
-//    }
-//
-//    @Override
-//    public void init(IngestModuleInit initContext) {
-//        services = IngestServices.getDefault();
-//        skCase = Case.getCurrentCase().getSleuthkitCase();
-//        running = false;
-//        verified = false;
-//        skipped = false;
-//        img = null;
-//        imgName = "";
-//        storedHash = "";
-//        calculatedHash = "";
-//
-//        if (logger == null) {
-//            logger = services.getLogger(this);
-//        }
-//
-//        if (messageDigest == null) {
-//            try {
-//                messageDigest = MessageDigest.getInstance("MD5");
-//            } catch (NoSuchAlgorithmException ex) {
-//                logger.log(Level.WARNING, "Error getting md5 algorithm", ex);
-//                throw new RuntimeException("Failed to get MD5 algorithm");
-//            }
-//        } else {
-//            messageDigest.reset();
-//        }
-//    }
-//
-//    @Override
-//    public void complete() {
-//        logger.info("complete() " + this.getName());
-//        if (skipped == false) {
-//            String msg = verified ? " verified" : " not verified";
-//            String extra = "<p>EWF Verification Results for " + imgName + "</p>";
-//            extra += "<li>Result:" + msg + "</li>";
-//            extra += "<li>Calculated hash: " + calculatedHash + "</li>";
-//            extra += "<li>Stored hash: " + storedHash + "</li>";
-//            services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, imgName + msg, extra));
-//            logger.info(imgName + msg);
-//        }
-//    }
-//
-//    @Override
-//    public void stop() {
-//        running = false;
-//    }
-//
-//    @Override
-//    public String getName() {
-//        return MODULE_NAME;
-//    }
-//
-//    @Override
-//    public String getVersion() {
-//        return MODULE_VERSION;
-//    }
-//
-//    @Override
-//    public String getDescription() {
-//        return MODULE_DESCRIPTION;
-//    }
-//
-//    @Override
-//    public boolean hasBackgroundJobsRunning() {
-//        return running;
-//    }
-//}
+package org.sleuthkit.autopsy.ewfverify;
+
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import javax.xml.bind.DatatypeConverter;
+import org.sleuthkit.autopsy.casemodule.Case;
+import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController;
+import org.sleuthkit.autopsy.ingest.IngestMessage;
+import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType;
+import org.sleuthkit.autopsy.ingest.IngestServices;
+import org.sleuthkit.datamodel.Content;
+import org.sleuthkit.datamodel.Image;
+import org.sleuthkit.datamodel.SleuthkitCase;
+import org.sleuthkit.datamodel.TskCoreException;
+import org.sleuthkit.datamodel.TskData;
+import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
+
+/**
+ * Data source ingest module that verifies the integrity of an Expert Witness
+ * Format (EWF) E01 image file by generating a hash of the file and comparing it
+ * to the value stored in the image.
+ */
+public class EwfVerifyIngestModule implements DataSourceIngestModule {
+    private static final long DEFAULT_CHUNK_SIZE = 32 * 1024;
+    private IngestServices services;
+    private volatile boolean running = false;
+    private Image img;
+    private String imgName;
+    private MessageDigest messageDigest;
+    private static Logger logger = null;
+    private static int messageId = 0;
+    private boolean verified = false;
+    private boolean skipped = false;
+    private String calculatedHash = "";
+    private String storedHash = "";
+
+    EwfVerifyIngestModule() {
+    }
+
+    @Override
+    public String getDisplayName() {
+        return EwfVerifierModuleFactory.getModuleName();
+    }
+
+    @Override
+    public void init(long taskId) {
+        services = IngestServices.getDefault();
+        running = false;
+        verified = false;
+        skipped = false;
+        img = null;
+        imgName = "";
+        storedHash = "";
+        calculatedHash = "";
+
+        if (logger == null) {
+            logger = services.getLogger(this);
+        }
+
+        if (messageDigest == null) {
+            try {
+                messageDigest = MessageDigest.getInstance("MD5");
+            } catch (NoSuchAlgorithmException ex) {
+                logger.log(Level.WARNING, "Error getting md5 algorithm", ex);
+                throw new RuntimeException("Failed to get MD5 algorithm");
+            }
+        } else {
+            messageDigest.reset();
+        }
+    }
+
+    @Override
+    public void process(Content dataSource, IngestDataSourceWorkerController statusHelper) {
+        imgName = dataSource.getName();
+        try {
+            img = dataSource.getImage();
+        } catch (TskCoreException ex) {
+            img = null;
+            logger.log(Level.SEVERE, "Failed to get image from Content.", ex);
+            services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this,
+                    "Error processing " + imgName));
+            return;
+        }
+
+        // Skip images that are not E01
+        if (img.getType() != TskData.TSK_IMG_TYPE_ENUM.TSK_IMG_TYPE_EWF_EWF) {
+            img = null;
+            logger.log(Level.INFO, "Skipping non-ewf image {0}", imgName);
+            services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this,
+                    "Skipping non-ewf image " + imgName));
+            skipped = true;
+            return;
+        }
+
+        if ((img.getMd5() != null) && !img.getMd5().isEmpty())
+        {
+            storedHash = img.getMd5().toLowerCase();
+            logger.log(Level.INFO, "Hash value stored in {0}: {1}", new Object[]{imgName, storedHash});
+
+        }
+        else {
+            services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this,
+                    "Image " + imgName + " does not have stored hash."));
+            return;
+        }
+
+        logger.log(Level.INFO, "Starting ewf verification of {0}", img.getName());
+        services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this,
+                "Starting " + imgName));
+
+        long size = img.getSize();
+        if (size == 0) {
+            logger.log(Level.WARNING, "Size of image {0} was 0 when queried.", imgName);
+            services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this,
+                    "Error getting size of " + imgName + ". Image will not be processed."));
+        }
+
+        // Libewf uses a chunk size of 64 times the sector size, which is the
+        // motivation for using that as the read size here.
+        long chunkSize = 64 * img.getSsize();
+        chunkSize = (chunkSize == 0) ? DEFAULT_CHUNK_SIZE : chunkSize;
+
+        int totalChunks = (int) Math.ceil((double) size / (double) chunkSize); // floating-point division so a trailing partial chunk is counted
+        logger.log(Level.INFO, "Total chunks = {0}", totalChunks);
+        int read;
+
+        byte[] data;
+        statusHelper.switchToDeterminate(totalChunks);
+
+        running = true;
+        // Read the image in chunk-size pieces and update the hash value with the data.
+        for (int i = 0; i < totalChunks; i++) {
+            if (statusHelper.isCancelled()) {
+                running = false;
+                return;
+            }
+            data = new byte[ (int) chunkSize ];
+            try {
+                read = img.read(data, i * chunkSize, chunkSize);
+            } catch (TskCoreException ex) {
+                String msg = "Error reading " + imgName + " at chunk " + i;
+                services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, msg));
+                logger.log(Level.SEVERE, msg, ex);
+                return;
+            }
+            messageDigest.update(data, 0, read); // hash only the bytes actually read, not the whole buffer
+            statusHelper.progress(i);
+        }
+
+        // Finish generating the hash and get it as a string value
+        calculatedHash = DatatypeConverter.printHexBinary(messageDigest.digest()).toLowerCase();
+        verified = calculatedHash.equals(storedHash);
+        logger.log(Level.INFO, "Hash calculated from {0}: {1}", new Object[]{imgName, calculatedHash});
+        running = false;
+    }
+
+    @Override
+    public void complete() {
+        logger.log(Level.INFO, "complete() {0}", getDisplayName());
+        if (skipped == false) {
+            String msg = verified ? " verified" : " not verified";
+            String extra = "<p>EWF Verification Results for " + imgName + "</p>";
+            extra += "<li>Result:" + msg + "</li>";
+            extra += "<li>Calculated hash: " + calculatedHash + "</li>";
+            extra += "<li>Stored hash: " + storedHash + "</li>";
+            services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, imgName + msg, extra));
+            logger.log(Level.INFO, "{0}{1}", new Object[]{imgName, msg});
+        }
+    }
+
+    @Override
+    public void stop() {
+    }
+}
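
Note (illustrative, not part of the patch): a minimal sketch of how the framework is
expected to drive the factories added above, assuming module discovery goes through
the NetBeans Lookup that backs @ServiceProvider registrations. The class name
ModuleDiscoverySketch, the runDataSourceModules method, and the null ingest-options
argument are hypothetical; the actual wiring belongs to IngestModuleLoader and
IngestManager and may differ.

    // Sketch only: assumes an Autopsy module context in which these imports resolve.
    import org.openide.util.Lookup;
    import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
    import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController;
    import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
    import org.sleuthkit.datamodel.Content;

    class ModuleDiscoverySketch {
        // Enumerates every factory registered with @ServiceProvider(service = IngestModuleFactory.class)
        // and runs each data source ingest module it can create against a single data source.
        static void runDataSourceModules(long taskId, Content dataSource,
                IngestDataSourceWorkerController statusHelper) throws IngestModuleFactory.InvalidOptionsException {
            for (IngestModuleFactory factory : Lookup.getDefault().lookupAll(IngestModuleFactory.class)) {
                if (factory.isDataSourceIngestModuleFactory()) {
                    // The factories in this patch take no configuration, so null options are passed.
                    DataSourceIngestModule module = factory.createDataSourceIngestModule(null);
                    module.init(taskId);
                    module.process(dataSource, statusHelper);
                    module.complete();
                }
            }
        }
    }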